Eddyhzd committed on
Commit
7fdb083
·
1 Parent(s): cd1e894
Files changed (1) hide show
  1. app.py +18 -71
app.py CHANGED
@@ -1,17 +1,16 @@
1
  import gradio as gr
2
  from openai import OpenAI
3
  import os
4
- from mcp import ClientSession, StdioServerParameters
5
- from mcp.client.stdio import stdio_client
6
  import asyncio
7
  from contextlib import AsyncExitStack
 
 
8
 
9
  cle_api = os.environ.get("CLE_API_MISTRAL")
10
 
11
  # Initialisation du client Mistral (API compatible OpenAI)
12
  clientLLM = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")
13
 
14
-
15
  loop = asyncio.new_event_loop()
16
  asyncio.set_event_loop(loop)
17
 
@@ -21,28 +20,22 @@ class MCPClientWrapper:
21
  self.exit_stack = None
22
  self.tools = []
23
 
24
- def connect(self, server_path: str) -> str:
25
- return loop.run_until_complete(self._connect(server_path))
26
 
27
- async def _connect(self, server_path: str) -> str:
28
  if self.exit_stack:
29
  await self.exit_stack.aclose()
30
 
31
  self.exit_stack = AsyncExitStack()
32
 
33
- is_python = server_path.endswith('.py')
34
- command = "python" if is_python else "node"
35
-
36
- server_params = StdioServerParameters(
37
- command=command,
38
- args=[server_path],
39
- env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"}
40
- )
41
 
42
- stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
43
- self.stdio, self.write = stdio_transport
44
 
45
- self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
46
  await self.session.initialize()
47
 
48
  response = await self.session.list_tools()
@@ -53,15 +46,17 @@ class MCPClientWrapper:
53
  } for tool in response.tools]
54
 
55
  tool_names = [tool["name"] for tool in self.tools]
56
- return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
57
 
 
58
  clientMCP = MCPClientWrapper()
59
- clientMCP.connect("mcp_server.py")
60
  print(clientMCP.tools)
61
 
62
- # Chatbot : simple écho Fonction chatbot reliée à Mistral
 
63
  def chatbot(message, history):
64
- # Préparer l’historique dans le format de Mistral
65
  messages = []
66
  for user_msg, bot_msg in history:
67
  messages.append({"role": "user", "content": user_msg})
@@ -73,54 +68,7 @@ def chatbot(message, history):
73
  response = clientLLM.chat.completions.create(
74
  model="mistral-small-latest",
75
  messages=messages,
76
- tools=[
77
- {
78
- "type": "function",
79
- "function": {
80
- "name": "analyze_herbicide_trends",
81
- "description": "Analyze herbicide usage trends over time.",
82
- "parameters": {
83
- "type": "object",
84
- "properties": {
85
- "years_range": {"type": "string"},
86
- "plot_filter": {"type": "string"}
87
- },
88
- "required": ["years_range", "plot_filter"]
89
- }
90
- }
91
- },
92
- {
93
- "type": "function",
94
- "function": {
95
- "name": "predict_future_weed_pressure",
96
- "description": "Predict weed pressure for the next 3 years.",
97
- "parameters": {"type": "object", "properties": {}}
98
- }
99
- },
100
- {
101
- "type": "function",
102
- "function": {
103
- "name": "recommend_sensitive_crop_plots",
104
- "description": "Recommend plots for sensitive crops.",
105
- "parameters": {"type": "object", "properties": {}}
106
- }
107
- },
108
- {
109
- "type": "function",
110
- "function": {
111
- "name": "generate_technical_alternatives",
112
- "description": "Generate technical alternatives.",
113
- "parameters": {
114
- "type": "object",
115
- "properties": {
116
- "herbicide_family": {"type": "string"}
117
- },
118
- "required": ["herbicide_family"]
119
- }
120
- }
121
- }
122
- ]
123
-
124
  )
125
 
126
  bot_reply = response.choices[0].message.content.strip()
@@ -128,10 +76,9 @@ def chatbot(message, history):
128
  return history, history
129
 
130
  with gr.Blocks() as demo:
131
-
132
  chatbot_ui = gr.Chatbot(label="ChatBot")
133
  msg = gr.Textbox(placeholder="Écrivez un message...")
134
 
135
  msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])
136
 
137
- demo.launch(debug=True)
 
1
  import gradio as gr
2
  from openai import OpenAI
3
  import os
 
 
4
  import asyncio
5
  from contextlib import AsyncExitStack
6
+ from mcp import ClientSession, HttpServerParameters
7
+ from mcp.client.http import http_client
8
 
9
  cle_api = os.environ.get("CLE_API_MISTRAL")
10
 
11
  # Initialisation du client Mistral (API compatible OpenAI)
12
  clientLLM = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")
13
 
 
14
  loop = asyncio.new_event_loop()
15
  asyncio.set_event_loop(loop)
16
 
 
20
  self.exit_stack = None
21
  self.tools = []
22
 
23
+ def connect(self, server_url: str) -> str:
24
+ return loop.run_until_complete(self._connect(server_url))
25
 
26
+ async def _connect(self, server_url: str) -> str:
27
  if self.exit_stack:
28
  await self.exit_stack.aclose()
29
 
30
  self.exit_stack = AsyncExitStack()
31
 
32
+ # Paramètres HTTP MCP
33
+ server_params = HttpServerParameters(url=server_url)
 
 
 
 
 
 
34
 
35
+ http_transport = await self.exit_stack.enter_async_context(http_client(server_params))
36
+ self.http, self.write = http_transport
37
 
38
+ self.session = await self.exit_stack.enter_async_context(ClientSession(self.http, self.write))
39
  await self.session.initialize()
40
 
41
  response = await self.session.list_tools()
 
46
  } for tool in response.tools]
47
 
48
  tool_names = [tool["name"] for tool in self.tools]
49
+ return f"Connecté au MCP {server_url}. Outils disponibles : {', '.join(tool_names)}"
50
 
51
+ # Connexion au MCP HuggingFace
52
  clientMCP = MCPClientWrapper()
53
+ print(clientMCP.connect("https://huggingface.co/spaces/HackathonCRA/mcp"))
54
  print(clientMCP.tools)
55
 
56
+
57
+ # Chatbot
58
  def chatbot(message, history):
59
+ # Préparer l’historique
60
  messages = []
61
  for user_msg, bot_msg in history:
62
  messages.append({"role": "user", "content": user_msg})
 
68
  response = clientLLM.chat.completions.create(
69
  model="mistral-small-latest",
70
  messages=messages,
71
+ tools=clientMCP.tools # maintenant on injecte directement les tools MCP récupérés
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
  )
73
 
74
  bot_reply = response.choices[0].message.content.strip()
 
76
  return history, history
77
 
78
  with gr.Blocks() as demo:
 
79
  chatbot_ui = gr.Chatbot(label="ChatBot")
80
  msg = gr.Textbox(placeholder="Écrivez un message...")
81
 
82
  msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])
83
 
84
+ demo.launch(debug=True)