Spaces:
Sleeping
Sleeping
KUNAL SHAW
committed on
Commit
·
c4e05ba
1
Parent(s):
123d70b
Switch to llama-3.1-8b-instant (mixtral was deprecated)
Browse files
app.py
CHANGED
|
@@ -360,8 +360,8 @@ def search_similar_questions(question: str) -> list:
|
|
| 360 |
# Step 3 - Custom LLM
|
| 361 |
from openai import OpenAI
|
| 362 |
|
| 363 |
-
# Get model name from environment or use Groq's mixtral
|
| 364 |
-
LLM_MODEL = os.environ.get("LLM_MODEL", "mixtral-8x7b-32768")
|
| 365 |
|
| 366 |
def generate_stream(prompt, model=None):
|
| 367 |
# Use environment variables for flexibility (OpenAI, Groq, or Custom HF Endpoint)
|
|
|
|
| 360 |
# Step 3 - Custom LLM
|
| 361 |
from openai import OpenAI
|
| 362 |
|
| 363 |
+
# Get model name from environment or use Groq's llama model (mixtral was deprecated)
|
| 364 |
+
LLM_MODEL = os.environ.get("LLM_MODEL", "llama-3.1-8b-instant")
|
| 365 |
|
| 366 |
def generate_stream(prompt, model=None):
|
| 367 |
# Use environment variables for flexibility (OpenAI, Groq, or Custom HF Endpoint)
|