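"""FastAPI backend for Llama Dev Suite PRO.

Exposes the conversational API (chat streaming, continuation, diff, chat
persistence and export, Ollama management) and serves the built frontend
bundle when one is available.
"""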
from __future__ import annotations
import json
from datetime import datetime, timezone
from pathlib import Path
from typing import Iterable, List
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
from config import settings
from ollama_utils import (
ask_ollama_stream,
list_models,
pull_model_with_progress,
start_ollama,
verify,
)
from ui_logic import (
    concat_code_blocks,
    do_export,
    do_load_chat,
    do_save_chat,
    extract_code_blocks,
    generate_diff,
    prepare_inputs,
)
from backend.schemas import (
    ChatRequest,
    ContinueRequest,
    DiffRequest,
    HistoryPayload,
    UploadedFilePayload,
)
from backend.utils import messages_to_pairs
app = FastAPI(
title="Llama Dev Suite PRO Backend",
version="1.0.0",
description="API conversacional para Llama Dev Suite PRO",
)
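# Allow cross-origin requests from any origin, with all methods and headers.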
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
def _decode_files(files: Iterable[UploadedFilePayload]):
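    """Convert incoming UploadedFilePayload objects into the file objects they wrap."""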
return [payload.to_file() for payload in files]
def _normalize_text(value: str | None) -> str:
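    """Return the stripped text, or an empty string when no value was provided."""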
return value.strip() if value else ""
@app.get("/api/health")
def health():
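    """Liveness check: report service status and the current UTC timestamp."""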
return {"status": "ok", "time": datetime.utcnow().isoformat()}
@app.get("/api/status")
def status():
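    """Return the current Ollama status as reported by verify()."""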
return {"status": verify()}
@app.post("/api/ollama/start")
def start():
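    """Start Ollama and return the resulting status message."""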
return {"status": start_ollama()}
@app.get("/api/models")
def models():
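    """List the models available to Ollama."""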
return {"models": list_models()}
@app.post("/api/ollama/pull")
def pull_model(payload: dict):
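    """Pull a model by name and return the collected progress events."""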
model_name = payload.get("model")
if not model_name:
raise HTTPException(status_code=400, detail="Falta 'model'.")
messages: List[str] = []
for update in pull_model_with_progress(model_name):
messages.append(update)
return {"events": messages}
def _build_chat_stream(req: ChatRequest):
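    """Prepare prompts from the request and return a generator factory that
    streams the model's answer as server-sent events (SSE)."""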
decoded_files = _decode_files(req.files)
system_prompt, user_prompt, preview, first_code = prepare_inputs(
decoded_files,
_normalize_text(req.exclude),
_normalize_text(req.extra),
req.mode,
req.language,
req.message,
_normalize_text(req.template),
)
history_pairs = messages_to_pairs(req.history)
def event_iterator():
assistant_accum = ""
try:
for chunk in ask_ollama_stream(
req.model,
system_prompt,
history_pairs,
user_prompt,
req.temperature,
req.top_p,
req.max_tokens,
):
assistant_accum += chunk
payload = {"type": "chunk", "content": chunk}
yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
code_blocks = extract_code_blocks(assistant_accum)
lang_main, code_concat = concat_code_blocks(code_blocks)
legacy_history = [
{"role": msg.role, "content": msg.content} for msg in req.history
]
legacy_history.append({"role": "user", "content": req.message})
legacy_history.append({"role": "assistant", "content": assistant_accum})
payload = {
"type": "final",
"message": {"role": "assistant", "content": assistant_accum},
"history": legacy_history,
"code": code_concat,
"code_language": lang_main,
"preview": preview,
"first_code": first_code,
"system_prompt": system_prompt,
"user_prompt": user_prompt,
}
yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
except Exception as exc: # pragma: no cover
error_payload = {
"type": "error",
"message": str(exc),
}
yield f"data: {json.dumps(error_payload, ensure_ascii=False)}\n\n"
return event_iterator
@app.post("/api/chat")
def chat(req: ChatRequest):
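    """Stream a chat completion for the given request as SSE."""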
iterator = _build_chat_stream(req)
return StreamingResponse(iterator(), media_type="text/event-stream")
@app.post("/api/chat/continue")
def chat_continue(req: ContinueRequest):
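    """Resume a partially generated answer and stream the continuation as SSE."""
    # Only the tail of the partial answer (last 2000 characters) is fed back as context.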
    cont_prompt = (
        req.user_prompt
        + "\n\nThe assistant answered this partially:\n"
        + req.assistant_text[-2000:]
        + "\n\nContinue exactly where you left off, without repeating."
    )
history_pairs = messages_to_pairs(req.history)
def event_iterator():
accumulated = req.assistant_text
try:
for chunk in ask_ollama_stream(
req.model,
req.system_prompt,
history_pairs,
cont_prompt,
req.temperature,
req.top_p,
req.max_tokens,
):
accumulated += chunk
payload = {"type": "chunk", "content": chunk}
yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
code_blocks = extract_code_blocks(accumulated)
lang_main, code_concat = concat_code_blocks(code_blocks)
legacy_history = [
{"role": msg.role, "content": msg.content} for msg in req.history
]
if legacy_history and legacy_history[-1]["role"] == "assistant":
legacy_history[-1]["content"] = accumulated
else:
legacy_history.append({"role": "assistant", "content": accumulated})
payload = {
"type": "final",
"message": {"role": "assistant", "content": accumulated},
"history": legacy_history,
"code": code_concat,
"code_language": lang_main,
}
yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
except Exception as exc: # pragma: no cover
error_payload = {"type": "error", "message": str(exc)}
yield f"data: {json.dumps(error_payload, ensure_ascii=False)}\n\n"
return StreamingResponse(event_iterator(), media_type="text/event-stream")
@app.post("/api/diff")
def diff(req: DiffRequest):
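    """Return the diff between the 'before' and 'after' texts."""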
return {"diff": generate_diff(req.before, req.after)}
@app.get("/api/chats")
def list_saved():
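    """List saved chat files, newest first."""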
files = sorted(
settings.CHAT_SAVE_PATH.glob("*.json"),
key=lambda x: x.stat().st_mtime,
reverse=True,
)
return {"items": [str(f) for f in files]}
@app.get("/api/chats/{path:path}")
def load_chat(path: str):
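    """Load a saved chat from disk; respond with 404 if it cannot be read."""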
history, status = do_load_chat(path)
if not history:
raise HTTPException(status_code=404, detail=status)
return {"history": history, "status": status}
@app.post("/api/chats/save")
def save_chat(payload: HistoryPayload):
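    """Persist the provided chat history under the given name."""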
message = do_save_chat(
[{"role": msg.role, "content": msg.content} for msg in payload.history],
payload.name or "",
)
if message.startswith("⚠️"):
raise HTTPException(status_code=400, detail=message)
return {"status": message}
@app.post("/api/export")
def export_chat(payload: HistoryPayload):
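    """Export the chat history to a downloadable file."""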
file_component = do_export(
[{"role": msg.role, "content": msg.content} for msg in payload.history]
)
if not file_component or not getattr(file_component, "value", None):
raise HTTPException(status_code=400, detail="No hay conversación para exportar.")
file_path = Path(file_component.value)
if not file_path.exists():
raise HTTPException(status_code=500, detail="Archivo de exportación no disponible.")
return FileResponse(path=file_path, filename=file_path.name)
FRONTEND_DIST = Path(__file__).resolve().parent.parent / "frontend" / "dist"
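# Serve the built frontend bundle (SPA) when it exists next to the backend.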
if FRONTEND_DIST.exists():
app.mount(
"/assets",
StaticFiles(directory=str(FRONTEND_DIST / "assets")),
name="assets",
)
@app.get("/", include_in_schema=False)
async def serve_index():
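        """Serve the frontend index page."""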
index_path = FRONTEND_DIST / "index.html"
if index_path.exists():
return FileResponse(index_path)
raise HTTPException(status_code=404, detail="Recurso no encontrado.")
@app.get("/{full_path:path}", include_in_schema=False)
async def serve_frontend(full_path: str):
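        """Fall back to the SPA index for any non-API route."""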
if full_path.startswith("api/"):
raise HTTPException(status_code=404, detail="Ruta API no encontrada.")
index_path = FRONTEND_DIST / "index.html"
if index_path.exists():
return FileResponse(index_path)
raise HTTPException(status_code=404, detail="Recurso no encontrado.")
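
# Local development entry point. Assumptions: uvicorn is installed alongside FastAPI
# and port 8000 is free; adjust host/port to match your deployment.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)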