Eddyhzd committed on
Commit
bb066a6
·
1 Parent(s): 50051d2

rollback mistral

Browse files
Files changed (1) hide show
  1. app.py +14 -27
app.py CHANGED
@@ -1,22 +1,20 @@
1
  import gradio as gr
 
2
  import os
3
- import asyncio
4
- from anthropic import Anthropic
5
  from mcp import ClientSession, StdioServerParameters
6
  from mcp.client.stdio import stdio_client
 
7
  from contextlib import AsyncExitStack
8
 
9
- # --- Clé API Anthropique ---
10
- cle_api = os.environ.get("CLE_API_ANTHROPIC")
 
 
11
 
12
- # Initialisation du client Anthropic
13
- clientLLM = Anthropic(api_key=cle_api)
14
 
15
- # --- Event Loop ---
16
  loop = asyncio.new_event_loop()
17
  asyncio.set_event_loop(loop)
18
 
19
- # --- Wrapper MCP ---
20
  class MCPClientWrapper:
21
  def __init__(self):
22
  self.session = None
@@ -57,15 +55,13 @@ class MCPClientWrapper:
57
  tool_names = [tool["name"] for tool in self.tools]
58
  return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
59
 
60
- # --- Connexion au serveur MCP ---
61
  clientMCP = MCPClientWrapper()
62
  clientMCP.connect("mcp_server.py")
63
  print(clientMCP.tools)
64
 
65
-
66
- # --- Fonction chatbot reliée à Claude ---
67
  def chatbot(message, history):
68
- # Préparer l’historique pour Anthropique
69
  messages = []
70
  for user_msg, bot_msg in history:
71
  messages.append({"role": "user", "content": user_msg})
@@ -73,26 +69,17 @@ def chatbot(message, history):
73
 
74
  messages.append({"role": "user", "content": message})
75
 
76
- # Appel API Anthropique
77
- response = clientLLM.messages.create(
78
- model="claude-3-5-sonnet-20240620", # tu peux changer pour claude-3-opus ou autre
79
- max_tokens=500,
80
  messages=messages,
81
- tools=clientMCP.tools if clientMCP.tools else None
82
  )
83
 
84
- # Récupération de la réponse
85
- bot_reply = ""
86
- if response.content:
87
- for block in response.content:
88
- if block.type == "text":
89
- bot_reply += block.text.strip()
90
-
91
  history.append(("Vous: " + message, "Bot: " + bot_reply))
92
  return history, history
93
 
94
-
95
- # --- UI Gradio ---
96
  with gr.Blocks() as demo:
97
 
98
  chatbot_ui = gr.Chatbot(label="ChatBot")
@@ -100,4 +87,4 @@ with gr.Blocks() as demo:
100
 
101
  msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])
102
 
103
- demo.launch(debug=True)
 
1
  import gradio as gr
2
+ from openai import OpenAI
3
  import os
 
 
4
  from mcp import ClientSession, StdioServerParameters
5
  from mcp.client.stdio import stdio_client
6
+ import asyncio
7
  from contextlib import AsyncExitStack
8
 
9
# Mistral API key, read from the environment (never hard-coded in the repo).
cle_api = os.environ.get("CLE_API_MISTRAL")

# Mistral exposes an OpenAI-compatible endpoint, so the stock OpenAI client
# is reused and simply pointed at api.mistral.ai instead of api.openai.com.
clientLLM = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")

# Dedicated event loop created at import time — presumably used by
# MCPClientWrapper to drive the async MCP session from Gradio's
# synchronous callbacks (TODO confirm: wrapper body not fully visible).
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
17
 
 
18
  class MCPClientWrapper:
19
  def __init__(self):
20
  self.session = None
 
55
  tool_names = [tool["name"] for tool in self.tools]
56
  return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
57
 
 
58
# Connect to the local MCP server at import time and log the tools it
# exposes; `clientMCP.tools` is later forwarded to the LLM call.
clientMCP = MCPClientWrapper()
clientMCP.connect("mcp_server.py")
print(clientMCP.tools)
61
 
62
# Chatbot callback wired to Mistral through the MCP tool list.
def chatbot(message, history):
    """Send the user message plus chat history to Mistral and append the reply.

    Args:
        message: New user message from the Gradio textbox.
        history: List of (user_msg, bot_msg) tuples kept by the Gradio Chatbot.

    Returns:
        (history, history): The updated history, duplicated because the same
        value feeds both outputs wired in ``msg.submit``.
    """
    # Rebuild the conversation in the OpenAI-compatible message format,
    # alternating user and assistant turns.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Only pass `tools` when some were discovered: an empty tool list is
    # rejected by the API (the pre-rollback version had the same guard).
    kwargs = {"model": "mistral-small-latest", "messages": messages}
    if clientMCP.tools:
        kwargs["tools"] = clientMCP.tools

    response = clientLLM.chat.completions.create(**kwargs)

    # `message.content` is None when the model answers with tool calls only;
    # guard before `.strip()` to avoid an AttributeError.
    content = response.choices[0].message.content
    bot_reply = content.strip() if content else ""

    history.append(("Vous: " + message, "Bot: " + bot_reply))
    return history, history
82
 
 
 
83
  with gr.Blocks() as demo:
84
 
85
  chatbot_ui = gr.Chatbot(label="ChatBot")
 
87
 
88
  msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])
89
 
90
+ demo.launch(debug=True)