KaiquanMah committed
Commit 0c99a5f · verified · 1 Parent(s): 18fc979

Update app.py

Files changed (1)
  1. app.py +48 -21
app.py CHANGED
@@ -1,37 +1,64 @@
-import gradio as gr
-import random
-from smolagents import GradioUI, CodeAgent, HfApiModel
+from typing import TypedDict, Annotated
+from langgraph.graph.message import add_messages
+from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
+from langgraph.prebuilt import ToolNode
+from langgraph.graph import START, StateGraph
+from langgraph.prebuilt import tools_condition
+from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
+
 # added
 import os
+from retriever import guest_info_tool
+
 
-# Import our custom tools from their modules
-from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
-from retriever import load_guest_dataset
 
 # Initialize the Hugging Face model
 # added token
 HF_TOKEN = os.environ['HF_TOKEN']
-model = HfApiModel(token=HF_TOKEN)
+llm = HuggingFaceEndpoint(
+    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
+    huggingfacehub_api_token=HF_TOKEN,
+)
+
+
 
-# Initialize the web search tool
-search_tool = DuckDuckGoSearchTool()
+chat = ChatHuggingFace(llm=llm, verbose=True)
+tools = [guest_info_tool]
+chat_with_tools = chat.bind_tools(tools)
 
-# Initialize the weather tool
-weather_info_tool = WeatherInfoTool()
+# Generate the AgentState and Agent graph
+class AgentState(TypedDict):
+    messages: Annotated[list[AnyMessage], add_messages]
 
-# Initialize the Hub stats tool
-hub_stats_tool = HubStatsTool()
+def assistant(state: AgentState):
+    return {
+        "messages": [chat_with_tools.invoke(state["messages"])],
+    }
 
-# Load the guest dataset and initialize the guest info tool
-guest_info_tool = load_guest_dataset()
+## The graph
+builder = StateGraph(AgentState)
 
-# Create Alfred with all the tools
-alfred = CodeAgent(
-    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
-    model=model,
-    add_base_tools=True,  # Add any additional base tools
-    planning_interval=3  # Enable planning every 3 steps
+# Define nodes: these do the work
+builder.add_node("assistant", assistant)
+builder.add_node("tools", ToolNode(tools))
+
+# Define edges: these determine how the control flow moves
+builder.add_edge(START, "assistant")
+builder.add_conditional_edges(
+    "assistant",
+    # If the latest message requires a tool, route to tools
+    # Otherwise, provide a direct response
+    tools_condition,
 )
+builder.add_edge("tools", "assistant")
+alfred = builder.compile()
+
+messages = [HumanMessage(content="Tell me about our guest named 'Lady Ada Lovelace'.")]
+response = alfred.invoke({"messages": messages})
+
+print("🎩 Alfred's Response:")
+print(response['messages'][-1].content)
+
 
 if __name__ == "__main__":
     GradioUI(alfred).launch()
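
The new import `from retriever import guest_info_tool` replaces `from retriever import load_guest_dataset`, but `retriever.py` itself is not touched by this commit. Below is a minimal sketch of what that module might expose, assuming a LangChain `@tool` wrapping a BM25 retriever over the guest dataset; the dataset name and column names are assumptions, not taken from this commit.

# Hypothetical retriever.py sketch: expose guest_info_tool as a LangChain tool.
# The dataset name and column names are assumptions, not part of this commit.
import datasets
from langchain_core.documents import Document
from langchain_core.tools import tool
from langchain_community.retrievers import BM25Retriever  # requires the rank_bm25 package

guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

# Turn each guest record into a searchable Document
docs = [
    Document(
        page_content="\n".join(
            [
                f"Name: {guest['name']}",
                f"Relation: {guest['relation']}",
                f"Description: {guest['description']}",
                f"Email: {guest['email']}",
            ]
        ),
        metadata={"name": guest["name"]},
    )
    for guest in guest_dataset
]

bm25_retriever = BM25Retriever.from_documents(docs)


@tool
def guest_info_tool(query: str) -> str:
    """Retrieve detailed information about gala guests by name or relation."""
    results = bm25_retriever.invoke(query)
    if not results:
        return "No matching guest information found."
    return "\n\n".join(doc.page_content for doc in results[:3])

Returning a plain string keeps the tool usable both by `chat.bind_tools(tools)` and by the `ToolNode(tools)` node in the graph.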
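The commit also drops the `from smolagents import GradioUI, CodeAgent, HfApiModel` import while the file still ends with `GradioUI(alfred).launch()`, and `alfred` is now a compiled LangGraph graph rather than a smolagents agent. A minimal sketch of one way the Space could serve the graph with plain Gradio instead; the `respond` helper and the `gr.ChatInterface` wiring are assumptions, not part of this commit.

# Hypothetical replacement for the final GradioUI(alfred).launch() call:
# wrap the compiled LangGraph graph ("alfred", defined above) in a plain
# Gradio chat interface. Not part of this commit.
import gradio as gr
from langchain_core.messages import HumanMessage


def respond(message: str, history: list) -> str:
    """Run one user turn through the compiled graph and return the reply text."""
    result = alfred.invoke({"messages": [HumanMessage(content=message)]})
    return result["messages"][-1].content


if __name__ == "__main__":
    gr.ChatInterface(respond).launch()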