alaselababatunde committed on
Commit 19a4f78 · 1 Parent(s): 3ab22fd
Files changed (1):
  1. app.py +10 -9
app.py CHANGED
@@ -5,6 +5,7 @@ from fastapi.responses import JSONResponse
 from langchain.prompts import PromptTemplate
 from langchain_huggingface import HuggingFaceEndpoint
 from huggingface_hub.utils import HfHubHTTPError
+from langchain.schema import HumanMessage  # ✅ For conversational inputs
 from vector import query_vector

 # ==============================
@@ -34,7 +35,7 @@ async def global_exception_handler(request: Request, exc: Exception):
     )

 # ==============================
-# MODELS PER ENDPOINT (Meta Models)
+# MODELS PER ENDPOINT (Meta Models, Conversational)
 # ==============================

 # 1. Crop Doctor
@@ -84,8 +85,8 @@ market_llm = HuggingFaceEndpoint(
 async def crop_doctor(symptoms: str):
     prompt = crop_template.format(symptoms=symptoms)
     try:
-        response = crop_llm.invoke(prompt)
-        return {"diagnosis": response}
+        response = crop_llm.invoke([HumanMessage(content=prompt)])  # ✅ FIXED
+        return {"diagnosis": str(response)}
     except HfHubHTTPError as e:
         return {"error": f"HuggingFace error: {str(e)}"}

@@ -93,8 +94,8 @@ async def crop_doctor(symptoms: str):
 async def multilingual_chat(query: str):
     prompt = chat_template.format(query=query)
     try:
-        response = chat_llm.invoke(prompt)
-        return {"reply": response}
+        response = chat_llm.invoke([HumanMessage(content=prompt)])  # ✅ FIXED
+        return {"reply": str(response)}
     except HfHubHTTPError as e:
         return {"error": f"HuggingFace error: {str(e)}"}

@@ -102,8 +103,8 @@ async def multilingual_chat(query: str):
 async def disaster_summarizer(report: str):
     prompt = disaster_template.format(report=report)
     try:
-        response = disaster_llm.invoke(prompt)
-        return {"summary": response}
+        response = disaster_llm.invoke([HumanMessage(content=prompt)])  # ✅ FIXED
+        return {"summary": str(response)}
     except HfHubHTTPError as e:
         return {"error": f"HuggingFace error: {str(e)}"}

@@ -111,8 +112,8 @@ async def disaster_summarizer(report: str):
 async def marketplace(product: str):
     prompt = market_template.format(product=product)
     try:
-        response = market_llm.invoke(prompt)
-        return {"recommendation": response}
+        response = market_llm.invoke([HumanMessage(content=prompt)])  # ✅ FIXED
+        return {"recommendation": str(response)}
     except HfHubHTTPError as e:
         return {"error": f"HuggingFace error: {str(e)}"}
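The pattern this commit adopts (format a PromptTemplate, wrap the result in a HumanMessage list, cast the response to str) is sketched below as a minimal standalone example. The repo_id, template text, and sample input are illustrative assumptions, not values taken from this repository; HuggingFaceEndpoint also expects a HUGGINGFACEHUB_API_TOKEN in the environment.

from langchain.prompts import PromptTemplate
from langchain.schema import HumanMessage
from langchain_huggingface import HuggingFaceEndpoint

# Illustrative template and endpoint; the real app.py defines its own.
crop_template = PromptTemplate(
    input_variables=["symptoms"],
    template="You are a crop doctor. Diagnose these symptoms: {symptoms}",
)
crop_llm = HuggingFaceEndpoint(
    repo_id="meta-llama/Meta-Llama-3-8B-Instruct",  # assumed model id
    max_new_tokens=256,
)

prompt = crop_template.format(symptoms="maize leaves yellowing with brown spots")
# invoke() accepts either a plain string or a list of messages; the commit
# switches to the message-list form for conversational endpoints.
response = crop_llm.invoke([HumanMessage(content=prompt)])
print(str(response))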