# memory_utils.py
import json
import pandas as pd
from datetime import datetime
from pathlib import Path
from typing import Optional


MEMORY_FILE = Path("memories.json")

def load_memories(n: int = 3) -> list[str]:
    """
    Load the last n summarized memories from the JSONL file.
    Returns a list of summary strings.
    """
    if not MEMORY_FILE.exists():
        return []
    with open(MEMORY_FILE, "r", encoding="utf-8") as f:
        lines = [json.loads(line) for line in f if line.strip()]
    return [m["summary"] for m in lines[-n:]]

def save_memory(summary: str) -> None:
    """
    Append a new memory summary to the JSONL file.
    Each line is a JSON object: {"timestamp": "...", "summary": "..."}
    """
    entry = {"timestamp": datetime.now().isoformat(), "summary": summary}
    with open(MEMORY_FILE, "a", encoding="utf-8") as f:
        f.write(json.dumps(entry) + "\n")

def load_memories_df(n: Optional[int] = None) -> pd.DataFrame:
    """
    Return recent memories as a pandas DataFrame (newest first).
    If n is given, only the n most recent entries are kept.
    """
    if not MEMORY_FILE.exists():
        return pd.DataFrame(columns=["timestamp", "summary"])
    with open(MEMORY_FILE, "r", encoding="utf-8") as f:
        lines = [json.loads(line) for line in f if line.strip()]
    df = pd.DataFrame(lines)
    # Entries are appended oldest-first, so take the last n rows and reverse
    # them to get newest-first order.
    return df.tail(n).iloc[::-1] if n else df.iloc[::-1]
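

# Usage sketch: running this module directly appends an example memory and reads it
# back, as a quick sanity check of the JSONL round trip. The summary text below is
# illustrative only and not part of the module's API.
if __name__ == "__main__":
    save_memory("Example: discussed JSONL-backed memory storage.")
    for summary in load_memories(n=3):
        print("-", summary)
    print(load_memories_df(n=3))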