from __future__ import annotations

import json
import logging
from typing import Any, Dict, List

from mission_planner import MissionPlan
from prompt import mission_summarizer_system_prompt, mission_summarizer_user_prompt
from utils.openai_client import get_openai_client

SUMMARY_MODEL = "gpt-4o-mini"


def _trim_detections(detections: List[Dict[str, Any]], max_boxes: int = 5) -> List[Dict[str, Any]]:
    """Cap the number of detection boxes forwarded to the summarizer."""
    if len(detections) <= max_boxes:
        return detections
    return detections[:max_boxes]


def _build_context_snapshot(records: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Pick the first, middle, and last frames as a compact global context."""
    if not records:
        return {}
    first = records[0]
    middle = records[len(records) // 2]
    last = records[-1]
    return {
        "first_frame": {
            "frame_index": first["frame_index"],
            "detections": _trim_detections(first.get("detections", [])),
        },
        "middle_frame": {
            "frame_index": middle["frame_index"],
            "detections": _trim_detections(middle.get("detections", [])),
        },
        "last_frame": {
            "frame_index": last["frame_index"],
            "detections": _trim_detections(last.get("detections", [])),
        },
    }


def summarize_results(
    mission_prompt: str,
    mission_plan: MissionPlan,
    detection_log: List[Dict[str, Any]],
) -> str:
    """Ask the summary model for a natural-language recap of the mission results."""
    if not detection_log:
        return "No detections were produced, so no summary is available."

    context_snapshot = _build_context_snapshot(detection_log)
    payload = {
        "mission_prompt": mission_prompt,
        "mission_plan": mission_plan.to_dict(),
        "global_context": context_snapshot,
        "detection_log": [
            {
                "frame_index": entry["frame_index"],
                "detections": _trim_detections(entry.get("detections", []), max_boxes=8),
            }
            for entry in detection_log
        ],
    }

    system_prompt = mission_summarizer_system_prompt()
    messages = [
        {"role": "system", "content": system_prompt},
        {
            "role": "user",
            "content": mission_summarizer_user_prompt(json.dumps(payload, ensure_ascii=False)),
        },
    ]

    try:
        client = get_openai_client()
        completion = client.chat.completions.create(
            model=SUMMARY_MODEL,
            temperature=0.2,
            messages=messages,
        )
        return (completion.choices[0].message.content or "").strip()
    except Exception:
        logging.exception("Failed to generate mission summary.")
        return "Mission summary generation failed."