Eddyhzd committed
Commit e224b82 · 1 Parent(s): 30ea124
Files changed (1)
  1. app.py +21 -20
app.py CHANGED
@@ -11,26 +11,6 @@ cle_api = os.environ.get("CLE_API_MISTRAL")
 # Initialise the Mistral client (OpenAI-compatible API)
 clientLLM = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")
 
-# Chatbot function connected to Mistral
-def chatbot(message, history):
-    # Convert the chat history into Mistral's message format
-    messages = []
-    for user_msg, bot_msg in history:
-        messages.append({"role": "user", "content": user_msg})
-        messages.append({"role": "assistant", "content": bot_msg})
-
-    messages.append({"role": "user", "content": message})
-
-    # Call the Mistral API
-    response = clientLLM.chat.completions.create(
-        model="mistral-small-latest",
-        messages=messages
-    )
-
-    bot_reply = response.choices[0].message.content.strip()
-    history.append(("Vous: " + message, "Bot: " + bot_reply))
-    return history, history
-
 
 loop = asyncio.new_event_loop()
 asyncio.set_event_loop(loop)
@@ -78,6 +58,27 @@ class MCPClientWrapper:
 clientMCP = MCPClientWrapper()
 clientMCP.connect("mcp_server.py")
 print(f"Connected to MCP server. Available tools: {', '.join([tool['name'] for tool in clientMCP.tools])}")
+
+# Chatbot function connected to Mistral
+def chatbot(message, history):
+    # Convert the chat history into Mistral's message format
+    messages = []
+    for user_msg, bot_msg in history:
+        messages.append({"role": "user", "content": user_msg})
+        messages.append({"role": "assistant", "content": bot_msg})
+
+    messages.append({"role": "user", "content": message})
+
+    # Call the Mistral API (MCP tools exposed to the model)
+    response = clientLLM.chat.completions.create(
+        model="mistral-small-latest",
+        messages=messages,
+        tools=clientMCP.tools
+    )
+
+    bot_reply = response.choices[0].message.content.strip()
+    history.append(("Vous: " + message, "Bot: " + bot_reply))
+    return history, history
 
  with gr.Blocks() as demo:
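
Note on the added call: passing tools=clientMCP.tools advertises the MCP tools to the model, but the function still reads only response.choices[0].message.content, which is None whenever the model answers with a tool call, so .strip() would raise. Below is a minimal sketch of one way a follow-up could handle that case; it is not part of this commit, it assumes clientMCP.tools is already in the OpenAI/Mistral "tools" schema, and the clientMCP.call_tool(...) helper is a hypothetical name that MCPClientWrapper is not shown to provide.

import json

def chatbot_with_tool_calls(message, history):
    # Sketch only: relies on the module-level clientLLM and clientMCP from app.py.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    response = clientLLM.chat.completions.create(
        model="mistral-small-latest",
        messages=messages,
        tools=clientMCP.tools,
    )
    msg = response.choices[0].message

    if msg.tool_calls:
        # The model requested a tool instead of answering directly.
        messages.append(msg)  # assistant turn that carries the tool calls
        for call in msg.tool_calls:
            args = json.loads(call.function.arguments)
            result = clientMCP.call_tool(call.function.name, args)  # hypothetical helper
            messages.append({
                "role": "tool",
                "tool_call_id": call.id,
                "content": str(result),
            })
        # Second round trip: let the model turn the tool results into an answer.
        response = clientLLM.chat.completions.create(
            model="mistral-small-latest",
            messages=messages,
        )
        msg = response.choices[0].message

    bot_reply = (msg.content or "").strip()
    history.append(("Vous: " + message, "Bot: " + bot_reply))
    return history, history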