Eddyhzd committed on
Commit
2fc1a4a
·
1 Parent(s): 3b313c8
Files changed (1) hide show
  1. app.py +73 -58
app.py CHANGED
@@ -1,68 +1,83 @@
1
- import gradio as gr
2
- from openai import OpenAI
 
 
 
 
3
  import os
4
- import json
5
- import requests
6
-
7
- cle_api = os.environ.get("CLE_API_MISTRAL")
8
- MCP_URL = "https://hackathoncra-gradio-mcp.hf.space/gradio_api/mcp/"
9
-
10
- # Initialisation du client Mistral (API compatible OpenAI)
11
- clientLLM = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")
12
-
13
def call_mcp(payload: dict):
    """POST *payload* as JSON to the Gradio MCP server and return the parsed reply.

    Raises
    ------
    requests.HTTPError
        On a non-2xx status. The status is checked *before* the body is
        parsed, so an HTML error page no longer blows up inside ``.json()``
        (the original parsed and printed the body first).
    """
    # `json=` serializes the payload and sets the Content-Type header in one step.
    response = requests.post(MCP_URL, json=payload)
    response.raise_for_status()
    result = response.json()  # parse once (the original parsed the body twice)
    print(result)  # debug trace of the MCP reply
    return result
20
-
21
# Chatbot with Mistral + MCP
def chatbot(message, history):
    """Run one chat turn: query Mistral, and relay the reply to MCP if it is JSON.

    Parameters
    ----------
    message : str
        The user's new message.
    history : list[tuple[str, str]]
        Prior display pairs, stored as ("Vous: ...", "Bot: ...").

    Returns the updated history twice, matching the two outputs wired up in
    the UI (`msg.submit(..., outputs=[chatbot_ui, chatbot_ui])`).
    """
    def _strip(prefix, text):
        # Remove a display prefix if present (3.8-compatible removeprefix).
        return text[len(prefix):] if text.startswith(prefix) else text

    # Rebuild the conversation for the model. The display prefixes are
    # stripped so "Vous: " / "Bot: " do not pollute the model's context on
    # later turns (the original replayed them verbatim).
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": _strip("Vous: ", user_msg)})
        messages.append({"role": "assistant", "content": _strip("Bot: ", bot_msg)})
    messages.append({"role": "user", "content": message})

    # Call the Mistral chat-completions API (OpenAI-compatible client).
    response = clientLLM.chat.completions.create(
        model="mistral-small-latest",
        messages=messages,
    )
    bot_reply = response.choices[0].message.content.strip()

    # If the model answered with an MCP JSON payload, forward it to the server.
    try:
        mcp_payload = json.loads(bot_reply)
    except json.JSONDecodeError:
        pass  # plain-text answer: keep the model's reply as-is
    else:
        try:
            mcp_result = call_mcp(mcp_payload)
            bot_reply = f"Réponse via MCP:\n{json.dumps(mcp_result, indent=2)}"
        except requests.RequestException as err:
            # MCP server unreachable or HTTP error: surface the problem in the
            # chat instead of crashing the Gradio callback (the original let
            # these exceptions escape).
            bot_reply = f"Réponse via MCP:\n(erreur d'appel MCP: {err})"

    history.append(("Vous: " + message, "Bot: " + bot_reply))
    return history, history
 
 
 
50
 
 
 
 
 
 
 
 
51
 
52
def call_mcp(payload: dict):
    """
    Generic helper to query the MCP server hosted on Gradio.

    Sends *payload* as a JSON POST to MCP_URL and returns the parsed JSON
    reply; raises requests.HTTPError on a non-2xx status.

    NOTE(review): this is a second definition of ``call_mcp`` — an earlier
    one exists above in this file. At runtime this later definition wins
    (it checks the status before parsing, which the earlier one did not).
    One of the two should be deleted.
    """
    headers = {"Content-Type": "application/json"}
    response = requests.post(MCP_URL, data=json.dumps(payload), headers=headers)
    response.raise_for_status()
    return response.json()
60
-
61
with gr.Blocks() as demo:
    # Chat transcript; `chatbot` appends ("Vous: ...", "Bot: ...") pairs to it.
    chatbot_ui = gr.Chatbot(label="ChatBot")
    # Input box for the user's message.
    msg = gr.Textbox(placeholder="Écrivez un message...")

    # NOTE(review): `chatbot` returns the history twice, so the same component
    # appears twice in `outputs`; the second slot was presumably meant to be a
    # gr.State holding the history — confirm. The textbox is also never
    # cleared after submit.
    msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])

demo.launch(debug=True)
 
 
1
+ """
2
+ cd to the `examples/snippets/clients` directory and run:
3
+ uv run client
4
+ """
5
+
6
+ import asyncio
7
  import os
8
+
9
+ from pydantic import AnyUrl
10
+
11
+ from mcp import ClientSession, StdioServerParameters, types
12
+ from mcp.client.stdio import stdio_client
13
+ from mcp.shared.context import RequestContext
14
+
15
# Create server parameters for the stdio connection: the client will spawn
# this command as a subprocess and speak MCP over its stdin/stdout.
server_params = StdioServerParameters(
    command="uv",  # Using uv to run the server
    args=["run", "server", "fastmcp_quickstart", "stdio"],  # We're already in snippets dir
    env={"UV_INDEX": os.environ.get("UV_INDEX", "")},  # forward the package index, if set
)
21
+
22
+
23
# Optional: create a sampling callback
async def handle_sampling_message(
    context: RequestContext[ClientSession, None], params: types.CreateMessageRequestParams
) -> types.CreateMessageResult:
    """Answer a server-initiated sampling request with a fixed canned reply."""
    print(f"Sampling request: {params.messages}")
    canned_reply = types.TextContent(
        type="text",
        text="Hello, world! from model",
    )
    return types.CreateMessageResult(
        role="assistant",
        content=canned_reply,
        model="gpt-3.5-turbo",
        stopReason="endTurn",
    )
37
 
 
38
 
39
async def run():
    """Drive one full client session against the stdio quickstart server."""
    async with stdio_client(server_params) as (read_stream, write_stream):
        async with ClientSession(read_stream, write_stream, sampling_callback=handle_sampling_message) as session:
            # Handshake must complete before any other request.
            await session.initialize()

            # Prompts: list them, then render one if any exist.
            prompt_list = await session.list_prompts()
            print(f"Available prompts: {[p.name for p in prompt_list.prompts]}")
            if prompt_list.prompts:
                # greet_user prompt from fastmcp_quickstart
                rendered = await session.get_prompt("greet_user", arguments={"name": "Alice", "style": "friendly"})
                print(f"Prompt result: {rendered.messages[0].content}")

            # Enumerate the resources and tools the server exposes.
            resource_list = await session.list_resources()
            print(f"Available resources: {[r.uri for r in resource_list.resources]}")
            tool_list = await session.list_tools()
            print(f"Available tools: {[t.name for t in tool_list.tools]}")

            # Read the greeting resource from fastmcp_quickstart and show its text.
            fetched = await session.read_resource(AnyUrl("greeting://World"))
            first_block = fetched.contents[0]
            if isinstance(first_block, types.TextContent):
                print(f"Resource content: {first_block.text}")

            # Call the `add` tool and report both result flavours.
            call_result = await session.call_tool("add", arguments={"a": 5, "b": 3})
            unstructured = call_result.content[0]
            if isinstance(unstructured, types.TextContent):
                print(f"Tool result: {unstructured.text}")
            structured = call_result.structuredContent
            print(f"Structured tool result: {structured}")
75
 
 
 
 
 
 
 
 
 
 
 
76
 
77
def main():
    """Entry point for the client script: run the async client to completion."""
    asyncio.run(run())


if __name__ == "__main__":
    main()