alaselababatunde commited on
Commit
846b9e4
·
1 Parent(s): 3b4ea07
Files changed (1) hide show
  1. app.py +29 -31
app.py CHANGED
@@ -3,8 +3,7 @@ import logging
3
  from fastapi import FastAPI, Request, Header, HTTPException, UploadFile, File
4
  from fastapi.responses import JSONResponse
5
  from pydantic import BaseModel
6
- from langchain_community.chat_models import ChatHF
7
- from langchain.schema import HumanMessage
8
  from PIL import Image
9
  import io
10
  from vector import query_vector
@@ -65,37 +64,40 @@ class VectorRequest(BaseModel):
65
  query: str
66
 
67
  # ==============================
68
- # HuggingFace Chat Models
69
  # ==============================
70
- chat_model = ChatHF(model_name="meta-llama/Llama-3.1-8B-Instruct", temperature=0.3)
71
- disaster_model = ChatHF(model_name="meta-llama/Llama-3.1-8B-Instruct", temperature=0.3)
72
- market_model = ChatHF(model_name="meta-llama/Llama-3.1-8B-Instruct", temperature=0.3)
 
73
 
74
- # Crop Doctor Vision + Language Model
75
- crop_model = ChatHF(model_name="meta-llama/Llama-3.2-11B-Vision-Instruct", temperature=0.3)
76
 
77
  # ==============================
78
  # Helper Functions
79
  # ==============================
80
- def run_chat_model(model, prompt: str):
81
  try:
82
- response = model([HumanMessage(content=prompt)])
83
- return response.content
 
 
 
84
  except Exception as e:
85
- logger.error(f"Model error: {e}")
86
  return f"⚠️ Unexpected model error: {str(e)}"
87
 
88
- def run_crop_doctor_model(model, image_bytes: bytes, symptoms: str):
89
- """Send image + text to vision-language model"""
90
  try:
91
- # Convert bytes to image
92
  image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
93
  prompt = f"Farmer reports: {symptoms}. Diagnose the crop disease and suggest treatment in simple language."
94
- # ChatHF allows messages with image objects as content
95
- response = model([HumanMessage(content=prompt, additional_kwargs={"image": image})])
96
- return response.content
 
97
  except Exception as e:
98
- logger.error(f"Crop Doctor model error: {e}")
99
  return f"⚠️ Unexpected model error: {str(e)}"
100
 
101
  # ==============================
@@ -103,32 +105,28 @@ def run_crop_doctor_model(model, image_bytes: bytes, symptoms: str):
103
  # ==============================
104
  @app.post("/crop-doctor")
105
  async def crop_doctor(symptoms: str = Header(...), image: UploadFile = File(...), authorization: str | None = Header(None)):
106
- """
107
- Receives crop image and symptom description.
108
- Returns diagnosis and suggested treatment.
109
- """
110
  check_auth(authorization)
111
  image_bytes = await image.read()
112
- result = run_crop_doctor_model(crop_model, image_bytes, symptoms)
113
- return {"diagnosis": result}
114
 
115
  @app.post("/multilingual-chat")
116
  async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
117
  check_auth(authorization)
118
- response = run_chat_model(chat_model, req.query)
119
- return {"reply": response}
120
 
121
  @app.post("/disaster-summarizer")
122
  async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
123
  check_auth(authorization)
124
- response = run_chat_model(disaster_model, req.report)
125
- return {"summary": response}
126
 
127
  @app.post("/marketplace")
128
  async def marketplace(req: MarketRequest, authorization: str | None = Header(None)):
129
  check_auth(authorization)
130
- response = run_chat_model(market_model, req.product)
131
- return {"recommendation": response}
132
 
133
  @app.post("/vector-search")
134
  async def vector_search(req: VectorRequest, authorization: str | None = Header(None)):
 
3
  from fastapi import FastAPI, Request, Header, HTTPException, UploadFile, File
4
  from fastapi.responses import JSONResponse
5
  from pydantic import BaseModel
6
+ from transformers import pipeline
 
7
  from PIL import Image
8
  import io
9
  from vector import query_vector
 
64
  query: str
65
 
66
# ==============================
# HuggingFace Pipelines
# ==============================
# NOTE(review): the "conversational" pipeline task was deprecated and removed
# from transformers (v4.42+), and its output (a Conversation object) never
# matched the list-of-dicts parsing done in run_conversational below.
# "text-generation" is the supported task for instruct/chat models and its
# output shape ([{"generated_text": ...}]) is exactly what the helper expects.
#
# Load the 8B model ONCE and share it: all three endpoints used identical
# model/config, and three separate loads would triple memory usage.
chat_pipe = pipeline("text-generation", model="meta-llama/Llama-3.1-8B-Instruct")
disaster_pipe = chat_pipe  # same underlying model, prompt differs per endpoint
market_pipe = chat_pipe

# Crop Doctor: image + text.
# NOTE(review): Llama-3.2-Vision is published as an "image-text-to-text"
# model, so the "image-to-text" task below is likely to fail to load —
# confirm the task name against the model card before deploying.
crop_pipe = pipeline("image-to-text", model="meta-llama/Llama-3.2-11B-Vision-Instruct")
76
 
77
  # ==============================
78
  # Helper Functions
79
  # ==============================
80
def run_conversational(pipe, prompt: str):
    """Run ``prompt`` through a text pipeline and return the generated text.

    The usual pipeline output is a list of dicts carrying a
    ``generated_text`` key; any other shape is stringified as-is.
    Pipeline failures are logged and reported back as an error string —
    this helper never raises.
    """
    try:
        raw = pipe(prompt)
        # Typical shape: [{"generated_text": "..."}]; fall back to str()
        # for anything unexpected so the caller always gets a string.
        if isinstance(raw, list) and raw:
            return raw[0].get("generated_text", str(raw))
        return str(raw)
    except Exception as exc:
        logger.error(f"Conversational pipeline error: {exc}")
        return f"⚠️ Unexpected model error: {str(exc)}"
90
 
91
def run_crop_doctor(image_bytes: bytes, symptoms: str):
    """Diagnose a crop image plus symptom text via the vision pipeline.

    Decodes ``image_bytes`` into an RGB image, builds the diagnosis
    prompt around ``symptoms``, and returns the generated text.  Any
    failure (bad image data, pipeline error) is logged and returned as
    an error string — this helper never raises.
    """
    try:
        buffer = io.BytesIO(image_bytes)
        crop_image = Image.open(buffer).convert("RGB")
        prompt = f"Farmer reports: {symptoms}. Diagnose the crop disease and suggest treatment in simple language."
        raw = crop_pipe(crop_image, prompt=prompt)
        # Expected shape: [{"generated_text": "..."}]; stringify anything else.
        if not isinstance(raw, list) or not raw:
            return str(raw)
        return raw[0].get("generated_text", str(raw))
    except Exception as exc:
        logger.error(f"Crop Doctor pipeline error: {exc}")
        return f"⚠️ Unexpected model error: {str(exc)}"
102
 
103
  # ==============================
 
105
  # ==============================
106
@app.post("/crop-doctor")
async def crop_doctor(symptoms: str = Header(...), image: UploadFile = File(...), authorization: str | None = Header(None)):
    """Diagnose a crop disease from an uploaded image and a symptom header.

    Auth is checked first via check_auth; the uploaded file is read fully
    into memory and handed to run_crop_doctor together with the symptoms.
    Returns {"diagnosis": <generated text or "⚠️ ..." error string>}.
    """
    check_auth(authorization)
    # NOTE(review): reads the whole upload into memory — fine for photos,
    # but confirm an upstream size limit exists for this endpoint.
    image_bytes = await image.read()
    diagnosis = run_crop_doctor(image_bytes, symptoms)
    return {"diagnosis": diagnosis}
112
 
113
@app.post("/multilingual-chat")
async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
    """Run the chat pipeline on ``req.query``.

    Returns {"reply": <generated text or "⚠️ ..." error string>}.
    """
    check_auth(authorization)
    reply = run_conversational(chat_pipe, req.query)
    return {"reply": reply}
118
 
119
@app.post("/disaster-summarizer")
async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
    """Run the disaster pipeline on ``req.report``.

    Returns {"summary": <generated text or "⚠️ ..." error string>}.
    """
    check_auth(authorization)
    summary = run_conversational(disaster_pipe, req.report)
    return {"summary": summary}
124
 
125
@app.post("/marketplace")
async def marketplace(req: MarketRequest, authorization: str | None = Header(None)):
    """Run the marketplace pipeline on ``req.product``.

    Returns {"recommendation": <generated text or "⚠️ ..." error string>}.
    """
    check_auth(authorization)
    recommendation = run_conversational(market_pipe, req.product)
    return {"recommendation": recommendation}
130
 
131
  @app.post("/vector-search")
132
  async def vector_search(req: VectorRequest, authorization: str | None = Header(None)):