Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -31,9 +31,25 @@ torch.backends.cudnn.benchmark = True
|
|
| 31 |
llm_model = torch.compile(llm_model)
|
| 32 |
|
| 33 |
# 🔹 Initialize ChromaDB
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
chroma_client = chromadb.PersistentClient(path="./chroma_db")
|
| 35 |
collection = chroma_client.get_or_create_collection(name="hepB_knowledge")
|
| 36 |
|
|
|
|
|
|
|
|
|
|
| 37 |
# 🔹 Function to Generate LLM Responses
|
| 38 |
def generate_humanized_response(query, retrieved_text):
|
| 39 |
"""Passes retrieved chunks through Mistral-7B to improve readability."""
|
|
|
|
| 31 |
llm_model = torch.compile(llm_model)
|
| 32 |
|
| 33 |
# 🔹 Initialize ChromaDB
|
| 34 |
+
import os
|
| 35 |
+
import zipfile
|
| 36 |
+
|
| 37 |
+
# 🔹 Unzip ChromaDB database if not extracted
|
| 38 |
+
if not os.path.exists("./chroma_db"):
|
| 39 |
+
with zipfile.ZipFile("chroma_db.zip", 'r') as zip_ref:
|
| 40 |
+
zip_ref.extractall("./")
|
| 41 |
+
|
| 42 |
+
print("✅ ChromaDB database loaded!")
|
| 43 |
+
|
| 44 |
+
import chromadb
|
| 45 |
+
|
| 46 |
+
# 🔹 Load ChromaDB from local storage
|
| 47 |
chroma_client = chromadb.PersistentClient(path="./chroma_db")
|
| 48 |
collection = chroma_client.get_or_create_collection(name="hepB_knowledge")
|
| 49 |
|
| 50 |
+
print("✅ ChromaDB initialized!")
|
| 51 |
+
|
| 52 |
+
|
| 53 |
# 🔹 Function to Generate LLM Responses
|
| 54 |
def generate_humanized_response(query, retrieved_text):
|
| 55 |
"""Passes retrieved chunks through Mistral-7B to improve readability."""
|