tejaskkkk committed
Commit 6255540 · verified · 1 Parent(s): 02ce6fb

updated for groq api

Files changed (1): utils.py +17 -32
utils.py CHANGED
@@ -14,28 +14,20 @@ load_dotenv()
 
 logger = logging.getLogger("swayam-chatbot")
 
-# Initialize Together client with proper error handling and version compatibility
+# Initialize Groq client with proper error handling
 try:
-    # Try different import patterns for different versions of together library
-    try:
-        from together import Together
-    except ImportError:
-        try:
-            from together.client import Together
-        except ImportError:
-            import together
-            Together = together.Together
+    from groq import Groq
 
     # Try to get API key from environment directly as a fallback
-    api_key = config.TOGETHER_API_KEY or os.environ.get("TOGETHER_API_KEY")
+    api_key = config.GROQ_API_KEY or os.environ.get("GROQ_API_KEY")
     if not api_key:
-        logger.warning("No Together API key found. LLM functionality will not work.")
+        logger.warning("No Groq API key found. LLM functionality will not work.")
         client = None
     else:
-        client = Together(api_key=api_key)
-        logger.info("Together client initialized successfully")
+        client = Groq(api_key=api_key)
+        logger.info("Groq client initialized successfully")
 except Exception as e:
-    logger.error(f"Failed to initialize Together client: {e}")
+    logger.error(f"Failed to initialize Groq client: {e}")
     client = None
 
 # Function for mean pooling to get sentence embeddings
@@ -164,33 +156,25 @@ def get_relevant_context(query, top_k=3):
 
     return "\n\n".join(context_parts)
 
-@traceable(run_type="llm", name="Together AI LLM")
+@traceable(run_type="llm", name="Groq LLM")
 def get_llm_response(messages):
-    """Get response from LLM using Together API"""
+    """Get response from LLM using Groq API"""
     if client is None:
-        logger.error("Together client not initialized. Cannot get LLM response.")
+        logger.error("Groq client not initialized. Cannot get LLM response.")
         return "Sorry, I cannot access the language model at the moment. Please ensure the API key is set correctly."
 
     try:
         response = client.chat.completions.create(
             model=config.MODEL_NAME,
-            messages=messages
+            messages=messages,
+            temperature=0.7,
+            max_completion_tokens=1024,
+            top_p=1,
+            stream=False
         )
         return response.choices[0].message.content
-    except AttributeError:
-        # Handle older version of together library
-        try:
-            response = client.completions.create(
-                model=config.MODEL_NAME,
-                prompt=messages[-1]["content"],
-                max_tokens=1000
-            )
-            return response.choices[0].text
-        except Exception as e:
-            logger.error(f"Error with fallback API call: {e}")
-            return "Sorry, I encountered an error while processing your request."
     except Exception as e:
-        logger.error(f"Error calling LLM API: {e}")
+        logger.error(f"Error calling Groq LLM API: {e}")
         return "Sorry, I encountered an error while processing your request."
 
 @traceable(run_type="chain", name="Response Generator")
@@ -217,3 +201,4 @@ def generate_response(query):
 
     response = get_llm_response(messages)
     return {"response": response, "type": "general"}
+
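
For context, a minimal standalone sketch of the call path the updated utils.py now follows, without the repo's config module or the LangSmith @traceable decorator. The GROQ_API_KEY environment variable matches the fallback this commit adds; the model name below is a placeholder assumption, since config.MODEL_NAME is not shown in this diff.

import os
from groq import Groq

# Initialize the Groq client from the environment
# (utils.py checks config.GROQ_API_KEY first, then falls back to the environment).
api_key = os.environ.get("GROQ_API_KEY")
client = Groq(api_key=api_key) if api_key else None

def get_llm_response(messages, model="llama-3.3-70b-versatile"):  # placeholder model name
    """Send a chat history to Groq and return the assistant's reply."""
    if client is None:
        return "Sorry, I cannot access the language model at the moment."
    try:
        # Same parameters the commit adds: a non-streaming chat completion.
        response = client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=0.7,
            max_completion_tokens=1024,
            top_p=1,
            stream=False,
        )
        return response.choices[0].message.content
    except Exception as exc:
        return f"Sorry, I encountered an error while processing your request. ({exc})"

print(get_llm_response([{"role": "user", "content": "Hello, who are you?"}]))

Because stream=False keeps the response shape the same as the old Together call (response.choices[0].message.content), the rest of get_llm_response is unchanged apart from the client construction, the added sampling parameters, and the log messages.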