nagarmayank committed
Commit 1ca9acb · 1 Parent(s): 09bcb2a

Integrate OpikTracer into ChatGoogleGenerativeAI and ChatGroq models; remove unused test endpoint and update requirements

Files changed (2)
  1. app.py +3 -9
  2. requirements.txt +3 -2
app.py CHANGED
@@ -19,6 +19,7 @@ import groq
 from datetime import datetime
 from fastapi import HTTPException
 from langchain_google_genai import ChatGoogleGenerativeAI
+from opik.integrations.langchain import OpikTracer
 
 # Load environment variables - for local development
 from dotenv import load_dotenv
@@ -142,20 +143,13 @@ def write_message(data: dict, header: str = Header()):
     message = data['message']
 
     try:
-        model = ChatGoogleGenerativeAI(model=GOOGLE_MODEL, max_retries=3)
+        model = ChatGoogleGenerativeAI(model=GOOGLE_MODEL, max_retries=3, callbacks = [OpikTracer()])
     except Exception as e: #fallback model
-        model = ChatGroq(temperature=1, groq_api_key=GROQ_API_KEY, model_name=GROQ_MODEL)
+        model = ChatGroq(model=GROQ_MODEL, temperature=1, callbacks = [OpikTracer()])
     # model = ChatOllama(model="gemma3:1b", temperature=1)
     transaction_bot = Agent(model, system=prompt)
     transaction_bot.graph.invoke({"messages": [message]})
     return {"message": "Transaction completed successfully"}
 
-@app.post("/test")
-def test(header = Header()):
-    if header != HF_TOKEN:
-        raise HTTPException(status_code=400, detail="Invalid header")
-
-    return {"message": "Test successful"}
-
 if __name__ == "__main__":
     uvicorn.run(app, host="0.0.0.0", port=7860, log_level="info")
requirements.txt CHANGED
@@ -1,9 +1,10 @@
 fastapi
 uvicorn[standard]
-langchain-ollama
 langgraph
 pygsheets
 pandas
 langchain-groq
 dotenv
-langchain-google-genai
+langchain-google-genai
+opik
+langchain
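
For reference, a minimal standalone sketch of the OpikTracer wiring this commit adds to app.py. The model name, prompt, and environment handling below are illustrative placeholders, not code from this repository:

# Minimal sketch: attach Opik's LangChain tracer to a chat model via callbacks.
# Assumes the Opik and Google API keys are configured in the environment;
# "gemini-1.5-flash" is a placeholder model name (the app reads GOOGLE_MODEL instead).
from opik.integrations.langchain import OpikTracer
from langchain_google_genai import ChatGoogleGenerativeAI

tracer = OpikTracer()  # records each LLM call as a trace in the configured Opik project

model = ChatGoogleGenerativeAI(
    model="gemini-1.5-flash",  # placeholder model name
    max_retries=3,
    callbacks=[tracer],        # same callback pattern as the diff above
)

response = model.invoke("Record a grocery expense of 250")
print(response.content)

The same callbacks=[OpikTracer()] argument is accepted by ChatGroq, which is why the fallback path in the diff passes it as well.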