Update app.py
app.py
CHANGED
@@ -7,7 +7,6 @@ from langchain_community.vectorstores import FAISS
 import requests
 import os
 import time
-import base64
 
 # Page configuration
 st.set_page_config(
@@ -124,10 +123,9 @@ def check_token_validity():
     except:
         return False
 
-def query_hf_inference_api(prompt, max_tokens=200):
+def query_hf_inference_api(prompt, max_tokens=200, model="google/flan-t5-base"):
     """Query Hugging Face Inference API with better error handling"""
-
-    API_URL = f"https://api-inference.huggingface.co/models/{MODEL}"
+    API_URL = f"https://api-inference.huggingface.co/models/{model}"
     headers = {"Authorization": f"Bearer {os.getenv('HF_API_KEY')}"} if os.getenv('HF_API_KEY') else {}
 
     payload = {
@@ -155,10 +153,10 @@ def query_hf_inference_api(prompt, max_tokens=200):
             <h4>403 Forbidden Error</h4>
             <p>Token is set: <strong>{'Yes' if os.getenv('HF_API_KEY') else 'No'}</strong></p>
             <p>Token valid: <strong>{'Yes' if st.session_state.token_valid else 'No'}</strong></p>
-            <p>Model: {MODEL}</p>
+            <p>Model: {model}</p>
             <p>Possible solutions:</p>
             <ol>
-                <li>Visit the <a href="https://huggingface.co/
+                <li>Visit the <a href="https://huggingface.co/{model}" target="_blank">model page</a> and click "Agree and access repository"</li>
                 <li>Ensure your token has "read" permissions</li>
                 <li>Wait 5-10 minutes after accepting terms</li>
                 <li>Try a different model using the dropdown below</li>
@@ -171,7 +169,7 @@ def query_hf_inference_api(prompt, max_tokens=200):
     elif response.status_code == 429:
         st.warning("Rate limit exceeded. Waiting and retrying...")
         time.sleep(3)
-        return query_hf_inference_api(prompt, max_tokens)
+        return query_hf_inference_api(prompt, max_tokens, model)
 
     else:
         st.error(f"API Error {response.status_code}: {response.text[:200]}")
@@ -304,7 +302,7 @@ with st.expander("🔧 Debug Information", expanded=False):
         <div class="info">
             <p>Your token is set but we're still having issues. Try these steps:</p>
             <ol>
-                <li>Visit the
+                <li>Visit the model page for your selected model</li>
                 <li>Click "Agree and access repository"</li>
                 <li>Wait 5-10 minutes for changes to propagate</li>
                 <li>Try a different model from the dropdown</li>
@@ -324,10 +322,15 @@ with st.expander("🔧 Debug Information", expanded=False):
     </div>
     """, unsafe_allow_html=True)
 
-# PDF Upload Section
+# PDF Upload Section (FIXED LABEL ERROR)
 with st.container():
     st.subheader("📤 Upload Your Textbook/Notes")
-
+    # Fixed empty label issue by adding a space and hiding it
+    pdf_file = st.file_uploader(
+        "Upload PDF",
+        type="pdf",
+        label_visibility="collapsed"
+    )
 
 # Main content
 if pdf_file:
@@ -415,6 +418,6 @@ if pdf_file:
 st.markdown("---")
 st.markdown("""
 <div style="text-align: center; padding: 20px;">
-    Built with ❤️ for students | PDF Study Assistant v4.
+    Built with ❤️ for students | PDF Study Assistant v4.1
 </div>
 """, unsafe_allow_html=True)
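check_token_validity() only appears above as hunk context, so its body is not part of this diff. A minimal sketch of what such a helper commonly looks like, assuming the Hugging Face whoami-v2 endpoint and the same HF_API_KEY environment variable the app already reads (this is not the app's actual implementation):

import os
import requests

def check_token_validity():
    """Sketch: return True if HF_API_KEY identifies a valid Hugging Face account."""
    token = os.getenv("HF_API_KEY")
    if not token:
        return False
    try:
        # whoami-v2 returns 200 only for a valid, readable token
        resp = requests.get(
            "https://huggingface.co/api/whoami-v2",
            headers={"Authorization": f"Bearer {token}"},
            timeout=10,
        )
        return resp.status_code == 200
    except:  # mirrors the bare except/return False shown in the diff context
        return False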
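The main change threads a model argument through query_hf_inference_api instead of relying on a module-level MODEL constant, and the error messages point at a model dropdown that is not shown in these hunks. A minimal sketch of how such a dropdown could feed the new parameter, assuming query_hf_inference_api is in scope and using an illustrative MODEL_OPTIONS list and selected_model name (neither is taken from the commit):

import streamlit as st

# Illustrative list of hosted models; the real dropdown contents are not in this diff.
MODEL_OPTIONS = [
    "google/flan-t5-base",
    "google/flan-t5-large",
    "mistralai/Mistral-7B-Instruct-v0.2",
]

selected_model = st.selectbox("Choose a model", MODEL_OPTIONS)

prompt = "Summarize the uploaded chapter in three bullet points."
answer = query_hf_inference_api(prompt, max_tokens=200, model=selected_model)
if answer:
    st.write(answer)

Passing the model explicitly like this also keeps the 429 retry path consistent, since the recursive call now forwards the same model instead of falling back to a global.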