"""FastAPI backend for Llama Dev Suite PRO.

Exposes health and status checks, Ollama model management, streaming chat
endpoints (Server-Sent Events), diff generation, chat persistence and export,
and, when a build is present, the compiled frontend as a single-page app.
"""

from __future__ import annotations

import json
from datetime import datetime, timezone
from pathlib import Path
from typing import Iterable, List

from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles

from config import settings
from ollama_utils import (
    ask_ollama_stream,
    list_models,
    pull_model_with_progress,
    start_ollama,
    verify,
)
from ui_logic import (
    concat_code_blocks,
    do_export,
    do_load_chat,
    do_save_chat,
    extract_code_blocks,
    generate_diff,
    prepare_inputs,
)
from backend.schemas import (
    ChatRequest,
    ContinueRequest,
    DiffRequest,
    HistoryPayload,
    UploadedFilePayload,
)
from backend.utils import messages_to_pairs

app = FastAPI(
    title="Llama Dev Suite PRO Backend",
    version="1.0.0",
    description="Conversational API for Llama Dev Suite PRO",
)

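# CORS is wide open (any origin, method, or header); fine for local
# development, but worth tightening before exposing this publicly.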
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)


def _decode_files(files: Iterable[UploadedFilePayload]):
    # Convert uploaded-file payloads into the file objects ui_logic expects.
    return [payload.to_file() for payload in files]


def _normalize_text(value: str | None) -> str:
    # Treat None as empty and strip surrounding whitespace.
    return value.strip() if value else ""


@app.get("/api/health")
def health():
    return {"status": "ok", "time": datetime.utcnow().isoformat()}


@app.get("/api/status")
def status():
    return {"status": verify()}


@app.post("/api/ollama/start")
def start():
    return {"status": start_ollama()}


@app.get("/api/models")
def models():
    return {"models": list_models()}


@app.post("/api/ollama/pull")
def pull_model(payload: dict):
    model_name = payload.get("model")
    if not model_name:
        raise HTTPException(status_code=400, detail="Falta 'model'.")

    # Drain the progress generator so all updates are returned at once.
    events: List[str] = list(pull_model_with_progress(model_name))
    return {"events": events}


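# Streaming protocol: the chat endpoints below emit Server-Sent Events, one
# JSON object per event, e.g.  data: {"type": "chunk", "content": "..."}
# Event types are "chunk" (incremental text), "final" (complete message plus
# derived code blocks and updated history), and "error".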
def _build_chat_stream(req: ChatRequest):
    decoded_files = _decode_files(req.files)
    system_prompt, user_prompt, preview, first_code = prepare_inputs(
        decoded_files,
        _normalize_text(req.exclude),
        _normalize_text(req.extra),
        req.mode,
        req.language,
        req.message,
        _normalize_text(req.template),
    )

    history_pairs = messages_to_pairs(req.history)

    def event_iterator():
        assistant_accum = ""
        try:
            for chunk in ask_ollama_stream(
                req.model,
                system_prompt,
                history_pairs,
                user_prompt,
                req.temperature,
                req.top_p,
                req.max_tokens,
            ):
                assistant_accum += chunk
                payload = {"type": "chunk", "content": chunk}
                yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"

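            # Streaming finished: extract fenced code blocks from the full
            # reply and rebuild the history in the legacy role/content format.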
            code_blocks = extract_code_blocks(assistant_accum)
            lang_main, code_concat = concat_code_blocks(code_blocks)
            legacy_history = [
                {"role": msg.role, "content": msg.content} for msg in req.history
            ]
            legacy_history.append({"role": "user", "content": req.message})
            legacy_history.append({"role": "assistant", "content": assistant_accum})

            payload = {
                "type": "final",
                "message": {"role": "assistant", "content": assistant_accum},
                "history": legacy_history,
                "code": code_concat,
                "code_language": lang_main,
                "preview": preview,
                "first_code": first_code,
                "system_prompt": system_prompt,
                "user_prompt": user_prompt,
            }
            yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
        except Exception as exc:  # pragma: no cover
            error_payload = {
                "type": "error",
                "message": str(exc),
            }
            yield f"data: {json.dumps(error_payload, ensure_ascii=False)}\n\n"

    return event_iterator


@app.post("/api/chat")
def chat(req: ChatRequest):
    iterator = _build_chat_stream(req)
    return StreamingResponse(iterator(), media_type="text/event-stream")


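# Continuation endpoint: re-prompts the model with the tail of a truncated
# answer so it can resume mid-reply instead of starting over.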
@app.post("/api/chat/continue")
def chat_continue(req: ContinueRequest):
    # Echo only the tail of the partial answer (last 2,000 characters) so the
    # continuation prompt stays bounded.
    cont_prompt = (
        req.user_prompt
        + "\n\nThe assistant responded with this partial answer:\n"
        + req.assistant_text[-2000:]
        + "\n\nContinue exactly where you left off, without repeating."
    )

    history_pairs = messages_to_pairs(req.history)

    def event_iterator():
        accumulated = req.assistant_text
        try:
            for chunk in ask_ollama_stream(
                req.model,
                req.system_prompt,
                history_pairs,
                cont_prompt,
                req.temperature,
                req.top_p,
                req.max_tokens,
            ):
                accumulated += chunk
                payload = {"type": "chunk", "content": chunk}
                yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"

            code_blocks = extract_code_blocks(accumulated)
            lang_main, code_concat = concat_code_blocks(code_blocks)

            legacy_history = [
                {"role": msg.role, "content": msg.content} for msg in req.history
            ]
            if legacy_history and legacy_history[-1]["role"] == "assistant":
                legacy_history[-1]["content"] = accumulated
            else:
                legacy_history.append({"role": "assistant", "content": accumulated})

            payload = {
                "type": "final",
                "message": {"role": "assistant", "content": accumulated},
                "history": legacy_history,
                "code": code_concat,
                "code_language": lang_main,
            }
            yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
        except Exception as exc:  # pragma: no cover
            error_payload = {"type": "error", "message": str(exc)}
            yield f"data: {json.dumps(error_payload, ensure_ascii=False)}\n\n"

    return StreamingResponse(event_iterator(), media_type="text/event-stream")


@app.post("/api/diff")
def diff(req: DiffRequest):
    return {"diff": generate_diff(req.before, req.after)}


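# Saved chats are listed newest-first, ordered by file modification time.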
@app.get("/api/chats")
def list_saved():
    files = sorted(
        settings.CHAT_SAVE_PATH.glob("*.json"),
        key=lambda x: x.stat().st_mtime,
        reverse=True,
    )
    return {"items": [str(f) for f in files]}


@app.get("/api/chats/{path:path}")
def load_chat(path: str):
    history, status_msg = do_load_chat(path)
    if not history:
        raise HTTPException(status_code=404, detail=status_msg)
    return {"history": history, "status": status_msg}


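# do_save_chat signals failure by prefixing its message with a warning emoji.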
@app.post("/api/chats/save")
def save_chat(payload: HistoryPayload):
    message = do_save_chat(
        [{"role": msg.role, "content": msg.content} for msg in payload.history],
        payload.name or "",
    )
    if message.startswith("⚠️"):
        raise HTTPException(status_code=400, detail=message)
    return {"status": message}


@app.post("/api/export")
def export_chat(payload: HistoryPayload):
    file_component = do_export(
        [{"role": msg.role, "content": msg.content} for msg in payload.history]
    )
    if not file_component or not getattr(file_component, "value", None):
        raise HTTPException(status_code=400, detail="No hay conversaci贸n para exportar.")
    file_path = Path(file_component.value)
    if not file_path.exists():
        raise HTTPException(status_code=500, detail="Export file not available.")
    return FileResponse(path=file_path, filename=file_path.name)


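# Serve the compiled frontend (if built) with an SPA fallback: unknown,
# non-API paths all resolve to index.html so client-side routing works.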
FRONTEND_DIST = Path(__file__).resolve().parent.parent / "frontend" / "dist"

if FRONTEND_DIST.exists():
    app.mount(
        "/assets",
        StaticFiles(directory=str(FRONTEND_DIST / "assets")),
        name="assets",
    )

    @app.get("/", include_in_schema=False)
    async def serve_index():
        index_path = FRONTEND_DIST / "index.html"
        if index_path.exists():
            return FileResponse(index_path)
        raise HTTPException(status_code=404, detail="Recurso no encontrado.")

    @app.get("/{full_path:path}", include_in_schema=False)
    async def serve_frontend(full_path: str):
        if full_path.startswith("api/"):
            raise HTTPException(status_code=404, detail="Ruta API no encontrada.")
        index_path = FRONTEND_DIST / "index.html"
        if index_path.exists():
            return FileResponse(index_path)
        raise HTTPException(status_code=404, detail="Recurso no encontrado.")