alaselababatunde committed on
Commit
766b563
·
1 Parent(s): dab8605
Files changed (1) hide show
  1. app.py +78 -74
app.py CHANGED
@@ -1,6 +1,7 @@
1
  import os
2
  import logging
3
- from fastapi import FastAPI, Header, HTTPException
 
4
  from fastapi.middleware.cors import CORSMiddleware
5
  from pydantic import BaseModel
6
  from langchain.prompts import PromptTemplate
@@ -9,36 +10,49 @@ from huggingface_hub.utils import HfHubHTTPError
9
  from langchain.schema import HumanMessage
10
  from vector import query_vector
11
 
12
- # ----------------- CONFIG -----------------
 
 
13
  logging.basicConfig(level=logging.INFO)
14
  logger = logging.getLogger("AgriCopilot")
15
 
16
- PROJECT_API_KEY = os.getenv("PROJECT_API_KEY", "super-secret-123")
17
-
18
- # FastAPI app
19
  app = FastAPI(title="AgriCopilot")
20
 
21
- # CORS
22
- app.add_middleware(
23
- CORSMiddleware,
24
- allow_origins=["*"], # Change to frontend URL in prod
25
- allow_credentials=True,
26
- allow_methods=["*"],
27
- allow_headers=["*"],
28
- )
29
 
30
- # ----------------- AUTH -----------------
31
  def check_auth(authorization: str | None):
32
- if not PROJECT_API_KEY:
33
- return
34
  if not authorization or not authorization.startswith("Bearer "):
35
  raise HTTPException(status_code=401, detail="Missing bearer token")
36
  token = authorization.split(" ", 1)[1]
37
  if token != PROJECT_API_KEY:
38
  raise HTTPException(status_code=403, detail="Invalid token")
39
 
40
- # ----------------- REQUEST MODELS -----------------
41
- class CropDoctorRequest(BaseModel):
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  symptoms: str
43
 
44
  class ChatRequest(BaseModel):
@@ -47,98 +61,88 @@ class ChatRequest(BaseModel):
47
  class DisasterRequest(BaseModel):
48
  report: str
49
 
50
- class MarketplaceRequest(BaseModel):
51
  product: str
52
 
53
  class VectorRequest(BaseModel):
54
  query: str
55
 
56
- # ----------------- PROMPT TEMPLATES -----------------
 
 
57
  crop_template = PromptTemplate(
58
  input_variables=["symptoms"],
59
- template="You are AgriCopilot, a multilingual AI crop doctor. Farmer reports: {symptoms}. Diagnose the disease and suggest treatments in simple farmer-friendly language."
 
 
 
 
 
60
  )
61
 
62
  chat_template = PromptTemplate(
63
  input_variables=["query"],
64
  template="You are AgriCopilot, a supportive multilingual AI guide built for farmers. Farmer says: {query}"
65
  )
 
 
 
 
 
66
 
67
  disaster_template = PromptTemplate(
68
  input_variables=["report"],
69
- template="You are AgriCopilot, an AI disaster assistant. Summarize the following report for farmers in simple steps: {report}"
 
 
 
 
 
70
  )
71
 
72
  market_template = PromptTemplate(
73
  input_variables=["product"],
74
- template="You are AgriCopilot, an agricultural marketplace recommender. Farmer wants: {product}. Suggest buyers/sellers and short advice."
 
 
 
 
 
75
  )
76
 
77
- # ----------------- LLM MODELS -----------------
78
- crop_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.2-11B-Vision-Instruct")
79
- chat_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.1-8B-Instruct")
80
- disaster_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.1-8B-Instruct")
81
- market_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.1-8B-Instruct")
82
-
83
- # ----------------- ROOT -----------------
84
- @app.get("/")
85
- async def root():
86
- return {"status": "✅ AgriCopilot AI Backend running"}
87
-
88
- # ----------------- ENDPOINTS -----------------
89
  @app.post("/crop-doctor")
90
- async def crop_doctor(req: CropDoctorRequest, authorization: str | None = Header(None)):
91
  check_auth(authorization)
92
- try:
93
- prompt = crop_template.format(symptoms=req.symptoms)
94
- response = crop_llm.invoke([HumanMessage(content=prompt)])
95
- return {"success": True, "diagnosis": str(response)}
96
- except HfHubHTTPError as e:
97
- if "quota" in str(e).lower():
98
- return {"success": False, "error": "⚠️ Model quota exceeded. Try again later."}
99
- raise e
100
 
101
  @app.post("/multilingual-chat")
102
  async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
103
  check_auth(authorization)
104
- try:
105
- prompt = chat_template.format(query=req.query)
106
- response = chat_llm.invoke([HumanMessage(content=prompt)])
107
- return {"success": True, "reply": str(response)}
108
- except HfHubHTTPError as e:
109
- if "quota" in str(e).lower():
110
- return {"success": False, "error": "⚠️ Model quota exceeded. Try again later."}
111
- raise e
112
 
113
  @app.post("/disaster-summarizer")
114
  async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
115
  check_auth(authorization)
116
- try:
117
- prompt = disaster_template.format(report=req.report)
118
- response = disaster_llm.invoke([HumanMessage(content=prompt)])
119
- return {"success": True, "summary": str(response)}
120
- except HfHubHTTPError as e:
121
- if "quota" in str(e).lower():
122
- return {"success": False, "error": "⚠️ Model quota exceeded. Try again later."}
123
- raise e
124
 
125
  @app.post("/marketplace")
126
- async def marketplace(req: MarketplaceRequest, authorization: str | None = Header(None)):
127
  check_auth(authorization)
128
- try:
129
- prompt = market_template.format(product=req.product)
130
- response = market_llm.invoke([HumanMessage(content=prompt)])
131
- return {"success": True, "recommendation": str(response)}
132
- except HfHubHTTPError as e:
133
- if "quota" in str(e).lower():
134
- return {"success": False, "error": "⚠️ Model quota exceeded. Try again later."}
135
- raise e
136
 
137
  @app.post("/vector-search")
138
  async def vector_search(req: VectorRequest, authorization: str | None = Header(None)):
139
  check_auth(authorization)
140
- try:
141
- results = query_vector(req.query)
142
- return {"success": True, "results": results}
143
- except Exception as e:
144
- return {"success": False, "error": str(e)}
 
1
  import os
2
  import logging
3
+ from fastapi import FastAPI, Request, Header, HTTPException
4
+ from fastapi.responses import JSONResponse
5
  from fastapi.middleware.cors import CORSMiddleware
6
  from pydantic import BaseModel
7
  from langchain.prompts import PromptTemplate
 
10
  from langchain.schema import HumanMessage
11
  from vector import query_vector
12
 
13
+ # ==============================
14
+ # Setup Logging
15
+ # ==============================
16
  logging.basicConfig(level=logging.INFO)
17
  logger = logging.getLogger("AgriCopilot")
18
 
19
# ==============================
# App Init
# ==============================
app = FastAPI(title="AgriCopilot")

@app.get("/")
async def root():
    """Health-check endpoint confirming the backend is reachable."""
    payload = {"status": "AgriCopilot AI Backend is working perfectly"}
    return payload
27
+
28
+ # ==============================
29
+ # AUTH CONFIG
30
+ # ==============================
31
+ PROJECT_API_KEY = "agricopilot404" # 🔑 Fixed bearer token for hackathon
32
 
 
33
  def check_auth(authorization: str | None):
34
+ """Validate Bearer token against PROJECT_API_KEY"""
 
35
  if not authorization or not authorization.startswith("Bearer "):
36
  raise HTTPException(status_code=401, detail="Missing bearer token")
37
  token = authorization.split(" ", 1)[1]
38
  if token != PROJECT_API_KEY:
39
  raise HTTPException(status_code=403, detail="Invalid token")
40
 
41
# ==============================
# Global Exception Handler
# ==============================
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Catch-all handler: log the full traceback, return a 500 JSON body."""
    # logger.exception records the stack trace (plain error(f"...") did not),
    # and lazy %-args avoid building the message when logging is disabled.
    logger.exception("Unhandled error: %s", exc)
    # NOTE(review): echoing str(exc) can leak internal details to clients —
    # consider a generic message for production deployments.
    return JSONResponse(
        status_code=500,
        content={"error": str(exc)},
    )
51
+
52
# ==============================
# Request Models
# ==============================
class CropRequest(BaseModel):
    """Request payload for the /crop-doctor endpoint."""
    # Free-text description of the crop symptoms observed by the farmer.
    symptoms: str
57
 
58
  class ChatRequest(BaseModel):
 
61
  class DisasterRequest(BaseModel):
62
  report: str
63
 
class MarketRequest(BaseModel):
    """Request payload for the /marketplace endpoint."""
    # Product the farmer wants to buy or sell.
    product: str
66
 
class VectorRequest(BaseModel):
    """Request payload for the /vector-search endpoint."""
    # Natural-language query to run against the vector store.
    query: str
69
 
# ==============================
# MODELS PER ENDPOINT
# ==============================
def _build_llm(repo_id: str) -> HuggingFaceEndpoint:
    """Create a HuggingFaceEndpoint with the shared sampling settings.

    All four endpoints used byte-identical generation parameters;
    centralizing them here removes the duplication and keeps the
    models consistent if the settings are ever tuned.
    """
    return HuggingFaceEndpoint(
        repo_id=repo_id,
        temperature=0.3,
        top_p=0.9,
        do_sample=True,
        repetition_penalty=1.1,
        max_new_tokens=1024,
    )

# Crop doctor: diagnosis from symptom descriptions.
crop_template = PromptTemplate(
    input_variables=["symptoms"],
    template="You are AgriCopilot, a multilingual AI assistant created to support farmers. Farmer reports: {symptoms}. Diagnose the most likely disease and suggest treatments in simple farmer-friendly language."
)
crop_llm = _build_llm("meta-llama/Llama-3.2-11B-Vision-Instruct")

# Multilingual chat: open-ended farmer support.
chat_template = PromptTemplate(
    input_variables=["query"],
    template="You are AgriCopilot, a supportive multilingual AI guide built for farmers. Farmer says: {query}"
)
chat_llm = _build_llm("meta-llama/Llama-3.1-8B-Instruct")

# Disaster summarizer: condense reports into simple steps.
disaster_template = PromptTemplate(
    input_variables=["report"],
    template="You are AgriCopilot, an AI disaster-response assistant. Summarize in simple steps: {report}"
)
disaster_llm = _build_llm("meta-llama/Llama-3.1-8B-Instruct")

# Marketplace advisor: buy/sell recommendations.
market_template = PromptTemplate(
    input_variables=["product"],
    template="You are AgriCopilot, an AI agricultural marketplace advisor. Farmer wants to sell or buy: {product}. Suggest best options and advice."
)
market_llm = _build_llm("meta-llama/Llama-3.1-8B-Instruct")
112
 
113
# ==============================
# ENDPOINTS
# ==============================
@app.post("/crop-doctor")
async def crop_doctor(req: CropRequest, authorization: str | None = Header(None)):
    """Diagnose crop disease from reported symptoms via the crop LLM.

    Returns:
        dict: ``{"diagnosis": <model output>}``.
    """
    check_auth(authorization)
    prompt = crop_template.format(symptoms=req.symptoms)
    try:
        response = crop_llm.invoke([HumanMessage(content=prompt)])
    except HfHubHTTPError as e:
        # Degrade gracefully on quota exhaustion (503) instead of letting the
        # global handler return an opaque 500; other HF errors propagate.
        if "quota" in str(e).lower():
            raise HTTPException(status_code=503, detail="Model quota exceeded. Try again later.")
        raise
    return {"diagnosis": str(response)}
 
 
 
 
 
122
 
123
@app.post("/multilingual-chat")
async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
    """Relay a farmer's message to the chat LLM and return its reply.

    Returns:
        dict: ``{"reply": <model output>}``.
    """
    check_auth(authorization)
    prompt = chat_template.format(query=req.query)
    try:
        response = chat_llm.invoke([HumanMessage(content=prompt)])
    except HfHubHTTPError as e:
        # Surface quota exhaustion as 503 rather than an opaque 500.
        if "quota" in str(e).lower():
            raise HTTPException(status_code=503, detail="Model quota exceeded. Try again later.")
        raise
    return {"reply": str(response)}
 
 
 
 
 
129
 
130
@app.post("/disaster-summarizer")
async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
    """Summarize a disaster report into simple steps via the disaster LLM.

    Returns:
        dict: ``{"summary": <model output>}``.
    """
    check_auth(authorization)
    prompt = disaster_template.format(report=req.report)
    try:
        response = disaster_llm.invoke([HumanMessage(content=prompt)])
    except HfHubHTTPError as e:
        # Surface quota exhaustion as 503 rather than an opaque 500.
        if "quota" in str(e).lower():
            raise HTTPException(status_code=503, detail="Model quota exceeded. Try again later.")
        raise
    return {"summary": str(response)}
 
 
 
 
 
136
 
137
@app.post("/marketplace")
async def marketplace(req: MarketRequest, authorization: str | None = Header(None)):
    """Recommend buy/sell options for a product via the marketplace LLM.

    Returns:
        dict: ``{"recommendation": <model output>}``.
    """
    check_auth(authorization)
    prompt = market_template.format(product=req.product)
    try:
        response = market_llm.invoke([HumanMessage(content=prompt)])
    except HfHubHTTPError as e:
        # Surface quota exhaustion as 503 rather than an opaque 500.
        if "quota" in str(e).lower():
            raise HTTPException(status_code=503, detail="Model quota exceeded. Try again later.")
        raise
    return {"recommendation": str(response)}
 
 
 
 
 
143
 
144
@app.post("/vector-search")
async def vector_search(req: VectorRequest, authorization: str | None = Header(None)):
    """Run a similarity search over the vector store for the given query."""
    check_auth(authorization)
    # Delegate entirely to the vector module; errors propagate to the
    # global exception handler.
    return {"results": query_vector(req.query)}