File size: 1,682 Bytes
3754f8b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 |
from __future__ import annotations
import base64
import io
from typing import List, Optional, Literal
from pydantic import BaseModel, Field, validator
class UploadedFilePayload(BaseModel):
    """A client-uploaded file whose raw bytes arrive Base64-encoded."""

    name: str
    content: str  # Base64-encoded file bytes

    def to_file(self) -> io.BytesIO:
        """Decode ``content`` into a named, rewound in-memory binary buffer.

        The original filename is attached as ``.name`` on the buffer so
        downstream consumers that expect a file-like object with a name
        (e.g. multipart upload helpers) can read it — TODO confirm against
        the callers of this method.
        """
        stream = io.BytesIO(base64.b64decode(self.content))
        stream.name = self.name
        stream.seek(0)  # kept for parity with the original; BytesIO starts at 0 anyway
        return stream
class ChatMessagePayload(BaseModel):
    """One turn of the chat transcript, tagged with its speaker role."""

    role: Literal["system", "user", "assistant"]  # restricted to the three standard roles
    content: str  # raw message text
class ChatRequest(BaseModel):
    """Request body for a chat/code-generation call.

    Bundles the running transcript, the new user message, generation
    options (mode / target language / template), attached files, and the
    model sampling parameters.
    """

    history: List[ChatMessagePayload] = Field(default_factory=list)  # prior turns
    # Non-emptiness is enforced by ``message_not_empty`` below; a
    # ``min_length=0`` constraint here would be a no-op (every string has
    # length >= 0), so none is declared.
    message: str = ""
    mode: str = "Generar código"  # UI mode label; runtime string, intentionally Spanish
    language: str = "Python"  # target programming language for generation
    extra: Optional[str] = ""
    exclude: Optional[str] = ""
    template: Optional[str] = ""
    model: str = "llama3.1:8b"  # default Ollama-style model tag
    temperature: float = 0.35
    top_p: float = 0.9
    max_tokens: int = 2048
    files: List[UploadedFilePayload] = Field(default_factory=list)  # attached uploads

    @validator("message")
    def message_not_empty(cls, value: str) -> str:
        """Reject messages that are empty or whitespace-only.

        NOTE(review): without ``always=True`` this pydantic-v1-style
        validator does not run for the default value, so an omitted
        ``message`` still yields "" — confirm whether that is intended.
        """
        if not value.strip():
            raise ValueError("El mensaje no puede estar vacío.")
        return value
class ContinueRequest(BaseModel):
    """Request to continue a partial assistant reply.

    Carries the transcript, the prompts that produced the partial text,
    and the sampling parameters for the follow-up model call.
    """

    history: List[ChatMessagePayload] = Field(default_factory=list)  # prior turns
    system_prompt: str  # required — no default, unlike ChatRequest's fields
    user_prompt: str
    assistant_text: str  # the truncated assistant output to be continued — presumably appended to; verify against caller
    model: str  # required model identifier
    temperature: float = 0.35
    top_p: float = 0.9
    max_tokens: int = 2048
class DiffRequest(BaseModel):
    """Two text snapshots to be compared (before/after); both default empty."""

    before: str = ""
    after: str = ""
class HistoryPayload(BaseModel):
    """A chat transcript plus an optional label — presumably used for
    saving/restoring named sessions; TODO confirm against the endpoints
    that consume it."""

    history: List[ChatMessagePayload] = Field(default_factory=list)  # full transcript
    name: Optional[str] = None  # optional session/transcript label
|