|
|
from __future__ import annotations |
|
|
|
|
|
import base64 |
|
|
import io |
|
|
from typing import List, Optional, Literal |
|
|
|
|
|
from pydantic import BaseModel, Field, validator |
|
|
|
|
|
|
|
|
class UploadedFilePayload(BaseModel):
    """A client-uploaded file transported over JSON as base64 text."""

    name: str  # Original filename reported by the client.
    content: str  # File bytes encoded as base64.

    def to_file(self) -> io.BytesIO:
        """Decode ``content`` into a rewound, named in-memory binary buffer."""
        decoded = base64.b64decode(self.content)
        stream = io.BytesIO(decoded)
        # Consumers that expect file-like objects (e.g. multipart upload
        # helpers) read the filename from the buffer's ``name`` attribute.
        stream.name = self.name
        stream.seek(0)
        return stream
|
|
|
|
|
|
|
|
class ChatMessagePayload(BaseModel):
    """One chat turn exchanged between the client and the model."""

    # Sender of the message, restricted to the three conventional chat roles.
    role: Literal["system", "user", "assistant"]

    # Plain-text body of the message.
    content: str
|
|
|
|
|
|
|
|
class ChatRequest(BaseModel):
    """Payload for a chat request: conversation state, generation mode,
    sampling parameters, and any files attached by the client.
    """

    # Prior conversation turns, oldest first.
    history: List[ChatMessagePayload] = Field(default_factory=list)

    # The new user message. The original declared Field("", min_length=0),
    # a no-op constraint that contradicted the non-empty validator below;
    # the plain default is equivalent and honest.
    message: str = ""

    # UI-selected options (Spanish labels are the client's literal values).
    mode: str = "Generar código"
    language: str = "Python"
    extra: Optional[str] = ""
    exclude: Optional[str] = ""
    template: Optional[str] = ""

    # Model selection and sampling parameters.
    model: str = "llama3.1:8b"
    temperature: float = 0.35
    top_p: float = 0.9
    max_tokens: int = 2048

    # Files uploaded alongside the message.
    files: List[UploadedFilePayload] = Field(default_factory=list)

    @validator("message")
    def message_not_empty(cls, value: str) -> str:
        """Reject messages that are empty or whitespace-only.

        NOTE(review): pydantic v1-style validators do not run on default
        values unless ``always=True``, so an omitted ``message`` keeps the
        empty-string default — confirm that callers always supply it.
        """
        if not value.strip():
            raise ValueError("El mensaje no puede estar vacío.")
        return value
|
|
|
|
|
|
|
|
class ContinueRequest(BaseModel):
    """Request to continue a generation, carrying the prompts and the
    partial assistant text produced so far.
    """

    # Prior conversation turns, oldest first.
    history: List[ChatMessagePayload] = Field(default_factory=list)

    # Prompts from the generation being continued.
    system_prompt: str
    user_prompt: str

    # Assistant output produced so far; presumably the continuation is
    # appended to this — confirm against the endpoint that consumes it.
    assistant_text: str

    # Model selection and sampling parameters (no default model here,
    # unlike ChatRequest — the caller must supply one).
    model: str
    temperature: float = 0.35
    top_p: float = 0.9
    max_tokens: int = 2048
|
|
|
|
|
|
|
|
class DiffRequest(BaseModel):
    """Two text snapshots to be compared; both default to empty strings."""

    # Text before the change.
    before: str = ""

    # Text after the change.
    after: str = ""
|
|
|
|
|
|
|
|
class HistoryPayload(BaseModel):
    """A conversation history plus an optional display name."""

    # Conversation turns, oldest first.
    history: List[ChatMessagePayload] = Field(default_factory=list)

    # Optional label for the conversation; None means unnamed.
    name: Optional[str] = None
|
|
|