serverdaun committed on
Commit
724c738
·
1 Parent(s): 0e29489

Refactor Sidekick initialization to accept a memory file parameter and add clear memory functionality in the Gradio UI.

Browse files
Files changed (2) hide show
  1. app.py +18 -2
  2. sidekick.py +23 -8
app.py CHANGED
@@ -1,12 +1,14 @@
 
1
  from typing import Any, List, Tuple
2
 
3
  import gradio as gr
4
 
 
5
  from sidekick import Sidekick
6
 
7
 
8
  async def setup() -> Sidekick:
9
- sidekick = Sidekick()
10
  await sidekick.setup()
11
  return sidekick
12
 
@@ -26,11 +28,21 @@ async def reset(sidekick: Sidekick) -> Tuple[str, str, None, Sidekick]:
26
  except Exception:
27
  pass # Ignore cleanup failures during reset
28
 
29
- new_sidekick = Sidekick()
30
  await new_sidekick.setup()
31
  return "", "", None, new_sidekick
32
 
33
 
 
 
 
 
 
 
 
 
 
 
34
  def free_resources(sidekick: Sidekick) -> None:
35
  try:
36
  if sidekick:
@@ -64,6 +76,7 @@ with gr.Blocks(title="Sidekick", theme=gr.themes.Default(primary_hue="emerald"))
64
  with gr.Row():
65
  reset_button = gr.Button("Reset", variant="stop")
66
  go_button = gr.Button("Go!", variant="primary")
 
67
 
68
  ui.load(setup, [], [sidekick])
69
  message.submit(
@@ -84,6 +97,9 @@ with gr.Blocks(title="Sidekick", theme=gr.themes.Default(primary_hue="emerald"))
84
  reset_button.click(
85
  reset, [sidekick], [message, success_criteria, chatbot, sidekick]
86
  )
 
 
 
87
 
88
 
89
  if __name__ == "__main__":
 
1
+ import os
2
  from typing import Any, List, Tuple
3
 
4
  import gradio as gr
5
 
6
+ from config import MEMORY_FILE
7
  from sidekick import Sidekick
8
 
9
 
10
async def setup() -> Sidekick:
    """Build a Sidekick wired to the shared memory file and finish its async init.

    Returns:
        A fully initialized Sidekick instance ready for use by the Gradio UI.
    """
    agent = Sidekick(memory_file=MEMORY_FILE)
    await agent.setup()
    return agent
14
 
 
28
  except Exception:
29
  pass # Ignore cleanup failures during reset
30
 
31
+ new_sidekick = Sidekick(memory_file=MEMORY_FILE)
32
  await new_sidekick.setup()
33
  return "", "", None, new_sidekick
34
 
35
 
36
async def clear_memory(sidekick: Sidekick) -> Tuple[str, str, None, Sidekick]:
    """Wipe the on-disk conversation memory, then reset the session.

    Args:
        sidekick: The current Sidekick instance to tear down and replace.

    Returns:
        The same tuple `reset` produces: cleared message box, cleared success
        criteria, empty chat history, and a fresh Sidekick.
    """
    try:
        # EAFP: attempt the delete directly rather than racing an exists() check.
        os.unlink(MEMORY_FILE)
    except FileNotFoundError:
        pass  # Memory file already gone — nothing to clear.
    return await reset(sidekick)
44
+
45
+
46
  def free_resources(sidekick: Sidekick) -> None:
47
  try:
48
  if sidekick:
 
76
  with gr.Row():
77
  reset_button = gr.Button("Reset", variant="stop")
78
  go_button = gr.Button("Go!", variant="primary")
79
+ clear_memory_button = gr.Button("Clear Memory", variant="secondary")
80
 
81
  ui.load(setup, [], [sidekick])
82
  message.submit(
 
97
  reset_button.click(
98
  reset, [sidekick], [message, success_criteria, chatbot, sidekick]
99
  )
100
+ clear_memory_button.click(
101
+ clear_memory, [sidekick], [message, success_criteria, chatbot, sidekick]
102
+ )
103
 
104
 
105
  if __name__ == "__main__":
sidekick.py CHANGED
@@ -3,7 +3,7 @@ import uuid
3
  from datetime import datetime
4
  from typing import Annotated, Any, List, Literal, Optional
5
 
6
- from dotenv import load_dotenv
7
  from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
8
  from langchain_openai import ChatOpenAI
9
  from langgraph.checkpoint.memory import MemorySaver
@@ -12,10 +12,9 @@ from langgraph.graph.message import add_messages
12
  from langgraph.prebuilt import ToolNode
13
  from pydantic import BaseModel, Field
14
 
 
15
  from tools import get_all_tools_with_browser
16
 
17
- load_dotenv(override=True)
18
-
19
 
20
  class State(BaseModel):
21
  """State for the Sidekick"""
@@ -42,7 +41,9 @@ class EvaluatorOutput(BaseModel):
42
  class Sidekick:
43
  """Sidekick class"""
44
 
45
- def __init__(self):
 
 
46
  self.worker_llm_with_tools = None
47
  self.evaluator_llm_with_output = None
48
  self.tools = None
@@ -52,14 +53,16 @@ class Sidekick:
52
  self.memory = MemorySaver()
53
  self.browser = None
54
  self.playwright = None
 
55
 
56
  async def setup(self) -> None:
57
  """Setup function"""
58
 
59
  self.tools, self.browser, self.playwright = await get_all_tools_with_browser()
60
- worker_llm = ChatOpenAI(model="gpt-4o-mini")
61
  self.worker_llm_with_tools = worker_llm.bind_tools(self.tools)
62
- evaluator_llm = ChatOpenAI(model="gpt-4o-mini")
 
63
  self.evaluator_llm_with_output = evaluator_llm.with_structured_output(
64
  EvaluatorOutput
65
  )
@@ -223,8 +226,12 @@ If you're seeing the Assistant repeating the same mistakes, then consider respon
223
 
224
  config = {"configurable": {"thread_id": self.sidekick_id}}
225
 
 
 
 
 
226
  state = {
227
- "messages": [HumanMessage(content=message)],
228
  "success_criteria": success_criteria
229
  or "The answer should be clear and accurate",
230
  "feedback_on_work": None,
@@ -232,8 +239,16 @@ If you're seeing the Assistant repeating the same mistakes, then consider respon
232
  "user_input_needed": False,
233
  }
234
  result = await self.graph.ainvoke(state, config=config)
 
 
 
 
 
 
 
 
235
  user = {"role": "user", "content": message}
236
- reply = {"role": "assistant", "content": result["messages"][-2].content}
237
  feedback = {"role": "assistant", "content": result["messages"][-1].content}
238
  return history + [user, reply, feedback]
239
 
 
3
  from datetime import datetime
4
  from typing import Annotated, Any, List, Literal, Optional
5
 
6
+ from langchain_community.chat_message_histories import FileChatMessageHistory
7
  from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
8
  from langchain_openai import ChatOpenAI
9
  from langgraph.checkpoint.memory import MemorySaver
 
12
  from langgraph.prebuilt import ToolNode
13
  from pydantic import BaseModel, Field
14
 
15
+ from config import OPENAI_CHAT_MODEL_EVALUATOR, OPENAI_CHAT_MODEL_WORKER
16
  from tools import get_all_tools_with_browser
17
 
 
 
18
 
19
  class State(BaseModel):
20
  """State for the Sidekick"""
 
41
  class Sidekick:
42
  """Sidekick class"""
43
 
44
+ def __init__(self, memory_file: str = "sidekick_memory.json"):
45
+ """Initialize the Sidekick"""
46
+
47
  self.worker_llm_with_tools = None
48
  self.evaluator_llm_with_output = None
49
  self.tools = None
 
53
  self.memory = MemorySaver()
54
  self.browser = None
55
  self.playwright = None
56
+ self.chat_memory = FileChatMessageHistory(file_path=memory_file)
57
 
58
  async def setup(self) -> None:
59
  """Setup function"""
60
 
61
  self.tools, self.browser, self.playwright = await get_all_tools_with_browser()
62
+ worker_llm = ChatOpenAI(model=OPENAI_CHAT_MODEL_WORKER)
63
  self.worker_llm_with_tools = worker_llm.bind_tools(self.tools)
64
+
65
+ evaluator_llm = ChatOpenAI(model=OPENAI_CHAT_MODEL_EVALUATOR)
66
  self.evaluator_llm_with_output = evaluator_llm.with_structured_output(
67
  EvaluatorOutput
68
  )
 
226
 
227
  config = {"configurable": {"thread_id": self.sidekick_id}}
228
 
229
+ # Load persistent conversation history.
230
+ persistent_messages = self.chat_memory.messages if self.chat_memory else []
231
+
232
+ # Build the initial state for the graph including persistent history and the new user message.
233
  state = {
234
+ "messages": persistent_messages + [HumanMessage(content=message)],
235
  "success_criteria": success_criteria
236
  or "The answer should be clear and accurate",
237
  "feedback_on_work": None,
 
239
  "user_input_needed": False,
240
  }
241
  result = await self.graph.ainvoke(state, config=config)
242
+
243
+ # Update the persistent memory with the new user and assistant messages
244
+ self.chat_memory.add_user_message(message)
245
+
246
+ # The assistant's reply is the second to last message (the last one is evaluator feedback)
247
+ assistant_reply_content = result["messages"][-2].content
248
+ self.chat_memory.add_ai_message(assistant_reply_content)
249
+
250
  user = {"role": "user", "content": message}
251
+ reply = {"role": "assistant", "content": assistant_reply_content}
252
  feedback = {"role": "assistant", "content": result["messages"][-1].content}
253
  return history + [user, reply, feedback]
254