alaselababatunde committed on
Commit
2b1f3cb
·
1 Parent(s): 50bddae
Files changed (1) hide show
  1. app.py +41 -79
app.py CHANGED
@@ -1,11 +1,12 @@
1
  import os
2
  import logging
3
- from fastapi import FastAPI, Request, Header, HTTPException
4
  from fastapi.responses import JSONResponse
5
  from pydantic import BaseModel
6
- from langchain.prompts import PromptTemplate
7
- from langchain_huggingface import HuggingFaceEndpoint
8
- from huggingface_hub.utils import HfHubHTTPError
 
9
  from vector import query_vector
10
 
11
  # ==============================
@@ -29,8 +30,7 @@ async def root():
29
  PROJECT_API_KEY = os.getenv("PROJECT_API_KEY", "agricopilot404")
30
 
31
  def check_auth(authorization: str | None):
32
- """Validate Bearer token against PROJECT_API_KEY"""
33
- if not PROJECT_API_KEY: # If key not set, skip validation
34
  return
35
  if not authorization or not authorization.startswith("Bearer "):
36
  raise HTTPException(status_code=401, detail="Missing bearer token")
@@ -52,9 +52,6 @@ async def global_exception_handler(request: Request, exc: Exception):
52
  # ==============================
53
  # Request Models
54
  # ==============================
55
- class CropRequest(BaseModel):
56
- symptoms: str
57
-
58
  class ChatRequest(BaseModel):
59
  query: str
60
 
@@ -68,104 +65,69 @@ class VectorRequest(BaseModel):
68
  query: str
69
 
70
  # ==============================
71
- # PROMPTS
72
  # ==============================
73
- crop_template = PromptTemplate(
74
- input_variables=["symptoms"],
75
- template="You are AgriCopilot, a multilingual AI assistant created to support farmers. Farmer reports: {symptoms}. Diagnose the most likely disease and suggest treatments in simple farmer-friendly language."
76
- )
77
-
78
- chat_template = PromptTemplate(
79
- input_variables=["query"],
80
- template="You are AgriCopilot, a supportive multilingual AI guide built for farmers. Farmer says: {query}"
81
- )
82
-
83
- disaster_template = PromptTemplate(
84
- input_variables=["report"],
85
- template="You are AgriCopilot, an AI disaster-response assistant. Summarize in simple steps: {report}"
86
- )
87
 
88
- market_template = PromptTemplate(
89
- input_variables=["product"],
90
- template="You are AgriCopilot, an AI agricultural marketplace advisor. Farmer wants to sell or buy: {product}. Suggest best options and advice."
91
- )
92
 
93
  # ==============================
94
- # HuggingFace Models
95
  # ==============================
96
- def make_llm(repo_id: str):
97
- return HuggingFaceEndpoint(
98
- repo_id=repo_id,
99
- task="conversational", # conversational for HF models
100
- temperature=0.3,
101
- top_p=0.9,
102
- do_sample=True,
103
- repetition_penalty=1.1,
104
- max_new_tokens=1024
105
- )
106
-
107
- crop_llm = make_llm("meta-llama/Llama-3.2-11B-Vision-Instruct")
108
- chat_llm = make_llm("meta-llama/Llama-3.1-8B-Instruct")
109
- disaster_llm = make_llm("meta-llama/Llama-3.1-8B-Instruct")
110
- market_llm = make_llm("meta-llama/Llama-3.1-8B-Instruct")
111
-
112
- # ==============================
113
- # ENDPOINT HELPERS
114
- # ==============================
115
- def run_conversational_model(model, prompt: str):
116
- """Send plain text prompt to HuggingFaceEndpoint and capture response"""
117
  try:
118
- logger.info(f"Sending prompt to HF model: {prompt}")
119
- # Pass prompt as a list of messages for conversational models
120
- result = model.invoke([{"role": "user", "content": prompt}])
121
- logger.info(f"HF raw response: {result}")
122
- except HfHubHTTPError as e:
123
- if "exceeded" in str(e).lower() or "quota" in str(e).lower():
124
- return {"parsed": None, "raw": "⚠️ HuggingFace daily quota reached. Try again later."}
125
- return {"parsed": None, "raw": f"⚠️ HuggingFace error: {str(e)}"}
126
  except Exception as e:
127
- return {"parsed": None, "raw": f"⚠️ Unexpected model error: {str(e)}"}
128
-
129
- # Parse output
130
- parsed_text = None
131
- if isinstance(result, list) and len(result) > 0 and "content" in result[0]:
132
- parsed_text = result[0]["content"]
133
- elif isinstance(result, dict) and "generated_text" in result:
134
- parsed_text = result["generated_text"]
135
- else:
136
- parsed_text = str(result)
137
 
138
- return {"parsed": parsed_text, "raw": result}
 
 
 
 
 
 
 
 
 
 
 
139
 
140
  # ==============================
141
  # ENDPOINTS
142
  # ==============================
143
  @app.post("/crop-doctor")
144
- async def crop_doctor(req: CropRequest, authorization: str | None = Header(None)):
 
 
 
 
145
  check_auth(authorization)
146
- prompt = crop_template.format(symptoms=req.symptoms)
147
- response = run_conversational_model(crop_llm, prompt)
148
- return {"diagnosis": response}
149
 
150
  @app.post("/multilingual-chat")
151
  async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
152
  check_auth(authorization)
153
- prompt = chat_template.format(query=req.query)
154
- response = run_conversational_model(chat_llm, prompt)
155
  return {"reply": response}
156
 
157
  @app.post("/disaster-summarizer")
158
  async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
159
  check_auth(authorization)
160
- prompt = disaster_template.format(report=req.report)
161
- response = run_conversational_model(disaster_llm, prompt)
162
  return {"summary": response}
163
 
164
  @app.post("/marketplace")
165
  async def marketplace(req: MarketRequest, authorization: str | None = Header(None)):
166
  check_auth(authorization)
167
- prompt = market_template.format(product=req.product)
168
- response = run_conversational_model(market_llm, prompt)
169
  return {"recommendation": response}
170
 
171
  @app.post("/vector-search")
 
1
  import os
2
  import logging
3
+ from fastapi import FastAPI, Request, Header, HTTPException, UploadFile, File
4
  from fastapi.responses import JSONResponse
5
  from pydantic import BaseModel
6
+ from langchain.chat_models import ChatHF
7
+ from langchain.schema import HumanMessage, AIMessage
8
+ from PIL import Image
9
+ import io
10
  from vector import query_vector
11
 
12
  # ==============================
 
30
  PROJECT_API_KEY = os.getenv("PROJECT_API_KEY", "agricopilot404")
31
 
32
  def check_auth(authorization: str | None):
33
+ if not PROJECT_API_KEY:
 
34
  return
35
  if not authorization or not authorization.startswith("Bearer "):
36
  raise HTTPException(status_code=401, detail="Missing bearer token")
 
52
  # ==============================
53
  # Request Models
54
  # ==============================
 
 
 
55
class ChatRequest(BaseModel):
    # Farmer's free-form message, forwarded verbatim to the chat model
    # by the /multilingual-chat endpoint.
    query: str
57
 
 
65
  query: str
66
 
67
  # ==============================
68
+ # HuggingFace Chat Models
69
  # ==============================
70
# Text-only chat models — one handle per endpoint, all backed by the same
# base instruct model at a low temperature for consistent advice.
# NOTE(review): upstream `langchain.chat_models` does not ship a `ChatHF`
# class (the HuggingFace chat integration is `ChatHuggingFace` from the
# `langchain_huggingface` package) — confirm this name resolves against the
# pinned langchain version, otherwise this import/instantiation fails at startup.
chat_model = ChatHF(model_name="meta-llama/Llama-3.1-8B-Instruct", temperature=0.3)
disaster_model = ChatHF(model_name="meta-llama/Llama-3.1-8B-Instruct", temperature=0.3)
market_model = ChatHF(model_name="meta-llama/Llama-3.1-8B-Instruct", temperature=0.3)

# Crop Doctor Vision + Language Model
crop_model = ChatHF(model_name="meta-llama/Llama-3.2-11B-Vision-Instruct", temperature=0.3)
 
 
76
 
77
  # ==============================
78
+ # Helper Functions
79
  # ==============================
80
def run_chat_model(model, prompt: str):
    """Run a plain-text prompt through a chat model and return the reply text.

    Failures are never raised to the caller: any exception is logged and
    reported as a warning string, so every endpoint always has a
    JSON-serializable value to return.
    """
    try:
        return model([HumanMessage(content=prompt)]).content
    except Exception as e:
        logger.error(f"Model error: {e}")
        return f"⚠️ Unexpected model error: {str(e)}"
 
 
 
 
 
 
 
 
87
 
88
def run_crop_doctor_model(model, image_bytes: bytes, symptoms: str):
    """Diagnose a crop problem from an uploaded photo plus the farmer's text.

    Decodes the raw upload into a PIL image, builds a diagnosis prompt, and
    sends both to the vision-language chat model. Errors are logged and
    returned as a warning string instead of propagating.
    """
    try:
        # Force RGB so alpha/grayscale/palette uploads are normalized
        # before reaching the model.
        photo = Image.open(io.BytesIO(image_bytes)).convert("RGB")
        prompt = f"Farmer reports: {symptoms}. Diagnose the crop disease and suggest treatment in simple language."
        # NOTE(review): this assumes the chat wrapper forwards the image from
        # additional_kwargs to the endpoint — confirm with the installed
        # integration; standard LangChain message classes ignore unknown kwargs.
        reply = model([HumanMessage(content=prompt, additional_kwargs={"image": photo})])
        return reply.content
    except Exception as e:
        logger.error(f"Crop Doctor model error: {e}")
        return f"⚠️ Unexpected model error: {str(e)}"
100
 
101
  # ==============================
102
  # ENDPOINTS
103
  # ==============================
104
@app.post("/crop-doctor")
async def crop_doctor(symptoms: str = Header(...), image: UploadFile = File(...), authorization: str | None = Header(None)):
    """Diagnose a crop from an uploaded image plus a `symptoms` header.

    Returns a JSON body with a single `diagnosis` field.
    NOTE(review): `symptoms` travels as an HTTP header, which is effectively
    latin-1-only — non-ASCII (multilingual) text may be mangled in transit.
    Consider a multipart form field instead; kept as-is to avoid breaking
    existing clients — confirm with API consumers.
    """
    check_auth(authorization)
    payload = await image.read()
    diagnosis = run_crop_doctor_model(crop_model, payload, symptoms)
    return {"diagnosis": diagnosis}
114
 
115
@app.post("/multilingual-chat")
async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
    """Forward the farmer's query verbatim to the chat model; reply as JSON."""
    check_auth(authorization)
    return {"reply": run_chat_model(chat_model, req.query)}
120
 
121
@app.post("/disaster-summarizer")
async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
    """Summarize a disaster report through the chat model; reply as JSON."""
    check_auth(authorization)
    return {"summary": run_chat_model(disaster_model, req.report)}
126
 
127
@app.post("/marketplace")
async def marketplace(req: MarketRequest, authorization: str | None = Header(None)):
    """Get buy/sell advice for the farmer's product via the chat model."""
    check_auth(authorization)
    return {"recommendation": run_chat_model(market_model, req.product)}
132
 
133
  @app.post("/vector-search")