alaselababatunde committed on
Commit
41b31c2
·
1 Parent(s): b94f09a
Files changed (1) hide show
  1. app.py +66 -43
app.py CHANGED
@@ -1,11 +1,11 @@
1
  import os
2
  import logging
3
- from fastapi import FastAPI, Request
4
  from fastapi.responses import JSONResponse
 
 
5
  from langchain.prompts import PromptTemplate
6
  from langchain_huggingface import HuggingFaceEndpoint
7
- from huggingface_hub.utils import HfHubHTTPError
8
- from langchain.schema import HumanMessage # ✅ For conversational inputs
9
  from vector import query_vector
10
 
11
  # ==============================
@@ -14,6 +14,11 @@ from vector import query_vector
14
  logging.basicConfig(level=logging.INFO)
15
  logger = logging.getLogger("AgriCopilot")
16
 
 
 
 
 
 
17
  # ==============================
18
  # App Init
19
  # ==============================
@@ -29,96 +34,114 @@ async def root():
29
  @app.exception_handler(Exception)
30
  async def global_exception_handler(request: Request, exc: Exception):
31
  logger.error(f"Unhandled error: {exc}")
32
- return JSONResponse(
33
- status_code=500,
34
- content={"error": str(exc)},
35
- )
 
 
 
 
 
 
 
 
 
36
 
37
  # ==============================
38
- # MODELS PER ENDPOINT (Meta Models, Conversational)
 
 
 
 
 
 
 
 
 
 
 
39
  # ==============================
40
 
41
  # 1. Crop Doctor
42
  crop_template = PromptTemplate(
43
  input_variables=["symptoms"],
44
- template="You are an agricultural crop doctor. A farmer reports: {symptoms}. Diagnose the most likely disease and suggest treatments in simple farmer-friendly language."
45
- )
46
- crop_llm = HuggingFaceEndpoint(
47
- repo_id="meta-llama/Llama-3.2-11B-Vision-Instruct",
48
- task="conversational"
49
  )
 
 
50
 
51
  # 2. Multilingual Chat
52
  chat_template = PromptTemplate(
53
  input_variables=["query"],
54
- template="You are a multilingual AI assistant for farmers. Answer clearly in the same language as the user. Farmer says: {query}"
55
- )
56
- chat_llm = HuggingFaceEndpoint(
57
- repo_id="meta-llama/Llama-3.1-8B-Instruct",
58
- task="conversational"
59
  )
 
 
60
 
61
  # 3. Disaster Summarizer
62
  disaster_template = PromptTemplate(
63
  input_variables=["report"],
64
- template="You are an AI disaster assistant. Summarize the following report for farmers in simple steps: {report}"
65
- )
66
- disaster_llm = HuggingFaceEndpoint(
67
- repo_id="meta-llama/Llama-3.1-8B-Instruct",
68
- task="conversational"
69
  )
 
 
70
 
71
- # 4. Marketplace Recommendation
72
  market_template = PromptTemplate(
73
  input_variables=["product"],
74
- template="You are an agricultural marketplace recommender. Farmer wants to sell or buy: {product}. Suggest possible matches and advice."
75
- )
76
- market_llm = HuggingFaceEndpoint(
77
- repo_id="meta-llama/Llama-3.1-8B-Instruct",
78
- task="conversational"
79
  )
 
 
80
 
81
  # ==============================
82
- # ENDPOINTS
83
  # ==============================
84
  @app.post("/crop-doctor")
85
- async def crop_doctor(symptoms: str):
86
- prompt = crop_template.format(symptoms=symptoms)
87
  try:
88
- response = crop_llm.invoke([HumanMessage(content=prompt)]) # ✅ FIXED
89
  return {"diagnosis": str(response)}
90
  except HfHubHTTPError as e:
91
  return {"error": f"HuggingFace error: {str(e)}"}
92
 
93
  @app.post("/multilingual-chat")
94
- async def multilingual_chat(query: str):
95
- prompt = chat_template.format(query=query)
96
  try:
97
- response = chat_llm.invoke([HumanMessage(content=prompt)]) # ✅ FIXED
98
  return {"reply": str(response)}
99
  except HfHubHTTPError as e:
100
  return {"error": f"HuggingFace error: {str(e)}"}
101
 
102
  @app.post("/disaster-summarizer")
103
- async def disaster_summarizer(report: str):
104
- prompt = disaster_template.format(report=report)
105
  try:
106
- response = disaster_llm.invoke([HumanMessage(content=prompt)]) # ✅ FIXED
107
  return {"summary": str(response)}
108
  except HfHubHTTPError as e:
109
  return {"error": f"HuggingFace error: {str(e)}"}
110
 
111
  @app.post("/marketplace")
112
- async def marketplace(product: str):
113
- prompt = market_template.format(product=product)
114
  try:
115
- response = market_llm.invoke([HumanMessage(content=prompt)]) # ✅ FIXED
116
  return {"recommendation": str(response)}
117
  except HfHubHTTPError as e:
118
  return {"error": f"HuggingFace error: {str(e)}"}
119
 
120
  @app.post("/vector-search")
121
- async def vector_search(query: str):
 
122
  try:
123
  results = query_vector(query)
124
  return {"results": results}
 
1
  import os
2
  import logging
3
+ from fastapi import FastAPI, Request, Header, HTTPException
4
  from fastapi.responses import JSONResponse
5
+ from fastapi.middleware.cors import CORSMiddleware
6
+ from huggingface_hub.utils import HfHubHTTPError
7
  from langchain.prompts import PromptTemplate
8
  from langchain_huggingface import HuggingFaceEndpoint
 
 
9
  from vector import query_vector
10
 
11
  # ==============================
 
14
  logging.basicConfig(level=logging.INFO)
15
  logger = logging.getLogger("AgriCopilot")
16
 
17
+ # ==============================
18
+ # Config
19
+ # ==============================
20
+ PROJECT_API_KEY = os.getenv("PROJECT_API_KEY", "super-secret-123") # 🔑 Change in prod
21
+
22
  # ==============================
23
  # App Init
24
  # ==============================
 
34
  @app.exception_handler(Exception)
35
  async def global_exception_handler(request: Request, exc: Exception):
36
  logger.error(f"Unhandled error: {exc}")
37
+ return JSONResponse(status_code=500, content={"error": str(exc)})
38
+
39
+ # ==============================
40
+ # Auth Helper
41
+ # ==============================
42
+ def check_auth(authorization: str | None):
43
+ if not PROJECT_API_KEY:
44
+ return
45
+ if not authorization or not authorization.startswith("Bearer "):
46
+ raise HTTPException(status_code=401, detail="Missing bearer token")
47
+ token = authorization.split(" ", 1)[1]
48
+ if token != PROJECT_API_KEY:
49
+ raise HTTPException(status_code=403, detail="Invalid token")
50
 
51
  # ==============================
52
+ # HuggingFace Model Config
53
+ # ==============================
54
+ default_model = dict(
55
+ temperature=0.3,
56
+ top_p=0.9,
57
+ do_sample=True,
58
+ repetition_penalty=1.1,
59
+ max_new_tokens=1024
60
+ )
61
+
62
+ # ==============================
63
+ # CHAINS
64
  # ==============================
65
 
66
  # 1. Crop Doctor
67
  crop_template = PromptTemplate(
68
  input_variables=["symptoms"],
69
+ template="You are AgriCopilot, a multilingual AI assistant for farmers. "
70
+ "A farmer reports: {symptoms}. Diagnose the likely disease and suggest "
71
+ "clear, farmer-friendly treatments."
 
 
72
  )
73
+ crop_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.2-11B-Vision-Instruct", **default_model)
74
+ crop_chain = crop_template | crop_llm
75
 
76
  # 2. Multilingual Chat
77
  chat_template = PromptTemplate(
78
  input_variables=["query"],
79
+ template="You are AgriCopilot, a supportive multilingual AI guide. "
80
+ "Respond in the SAME language as the user. Farmer says: {query}"
 
 
 
81
  )
82
+ chat_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.1-8B-Instruct", **default_model)
83
+ chat_chain = chat_template | chat_llm
84
 
85
  # 3. Disaster Summarizer
86
  disaster_template = PromptTemplate(
87
  input_variables=["report"],
88
+ template="You are AgriCopilot, an AI disaster assistant. "
89
+ "Summarize the following report into 3–5 short, actionable steps farmers can follow: {report}"
 
 
 
90
  )
91
+ disaster_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.1-8B-Instruct", **default_model)
92
+ disaster_chain = disaster_template | disaster_llm
93
 
94
+ # 4. Marketplace Recommender
95
  market_template = PromptTemplate(
96
  input_variables=["product"],
97
+ template="You are AgriCopilot, an agricultural marketplace advisor. "
98
+ "Farmer wants to sell or buy: {product}. Suggest options, advice, and safe trade tips."
 
 
 
99
  )
100
+ market_llm = HuggingFaceEndpoint(repo_id="meta-llama/Llama-3.1-8B-Instruct", **default_model)
101
+ market_chain = market_template | market_llm
102
 
103
  # ==============================
104
+ # ENDPOINTS (with auth)
105
  # ==============================
106
  @app.post("/crop-doctor")
107
+ async def crop_doctor(symptoms: str, authorization: str | None = Header(None)):
108
+ check_auth(authorization)
109
  try:
110
+ response = crop_chain.invoke({"symptoms": symptoms})
111
  return {"diagnosis": str(response)}
112
  except HfHubHTTPError as e:
113
  return {"error": f"HuggingFace error: {str(e)}"}
114
 
115
  @app.post("/multilingual-chat")
116
+ async def multilingual_chat(query: str, authorization: str | None = Header(None)):
117
+ check_auth(authorization)
118
  try:
119
+ response = chat_chain.invoke({"query": query})
120
  return {"reply": str(response)}
121
  except HfHubHTTPError as e:
122
  return {"error": f"HuggingFace error: {str(e)}"}
123
 
124
  @app.post("/disaster-summarizer")
125
+ async def disaster_summarizer(report: str, authorization: str | None = Header(None)):
126
+ check_auth(authorization)
127
  try:
128
+ response = disaster_chain.invoke({"report": report})
129
  return {"summary": str(response)}
130
  except HfHubHTTPError as e:
131
  return {"error": f"HuggingFace error: {str(e)}"}
132
 
133
  @app.post("/marketplace")
134
+ async def marketplace(product: str, authorization: str | None = Header(None)):
135
+ check_auth(authorization)
136
  try:
137
+ response = market_chain.invoke({"product": product})
138
  return {"recommendation": str(response)}
139
  except HfHubHTTPError as e:
140
  return {"error": f"HuggingFace error: {str(e)}"}
141
 
142
  @app.post("/vector-search")
143
+ async def vector_search(query: str, authorization: str | None = Header(None)):
144
+ check_auth(authorization)
145
  try:
146
  results = query_vector(query)
147
  return {"results": results}