import gradio as gr
from pathlib import Path
from config import settings
from ollama_utils import (
    start_ollama,
    verify,
    pull_model_with_progress,
    list_models,
)
from ui_logic import (
    main_chat,
    _init_state,
)
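# The imports above come from this Space's own modules; their exact signatures
# are not visible from this file. A rough sketch of the contracts this UI
# relies on (assumptions, for orientation only):
#
#   start_ollama()                -> launch the local Ollama server if needed
#   verify()                      -> check that the Ollama endpoint responds
#   pull_model_with_progress(...) -> download a model (imported but unused here)
#   list_models()                 -> model names offered in the dropdown below
#   _init_state()                 -> initial per-session state for gr.State
#   main_chat(state, history, message, model, files)
#                                 -> updated (state, history, textbox, files)
#
# Path (pathlib) is likewise imported but not referenced in this file.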
# --- CSS for the ChatGPT-like interface ---
APP_CSS = """
:root {
    --bg-light: #FFFFFF;
    --bg-dark: #202123;
    --text-light: #202123;
    --text-dark: #ECECF1;
    --border-light: #E5E5E5;
    --border-dark: #4D4D4F;
    --input-bg-light: #FFFFFF;
    --input-bg-dark: #40414F;
    --assistant-bg-light: #F7F7F8;
    --assistant-bg-dark: #444654;
    --font-primary: 'Inter', 'Figtree', -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
    --font-mono: "JetBrains Mono", "Fira Code", ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
    --shadow: 0 0 10px rgba(0,0,0,0.1);
    --shadow-dark: 0 0 15px rgba(0,0,0,0.5);
}
body, html {
    background-color: var(--bg-light);
    color: var(--text-light);
    font-family: var(--font-primary);
    line-height: 1.6;
}
@media (prefers-color-scheme: dark) {
    body, html {
        background-color: var(--bg-dark);
        color: var(--text-dark);
    }
}
.gradio-container {
    background: transparent !important;
    max-width: 820px !important;
    margin: 0 auto !important;
    padding-top: 2rem !important;
}
#chat-area {
    display: flex;
    flex-direction: column;
    gap: 1.5rem;
}
#chat-canvas {
    height: min(520px, calc(100vh - 260px)) !important;
    min-height: 320px !important;
    overflow-y: auto;
    border: none !important;
    box-shadow: none !important;
    display: flex;
    flex-direction: column;
}
#chat-canvas .message-row {
    max-width: 100% !important;
}
#chat-canvas .message {
    border-radius: 18px !important;
    padding: 14px 20px !important;
}
#chat-canvas .user .message {
    background: var(--bg-light) !important;
    color: var(--text-light) !important;
    border: 1px solid var(--border-light) !important;
    margin-left: auto;
}
#chat-canvas .bot .message {
    background: var(--assistant-bg-light) !important;
    color: var(--text-light) !important;
    border: 1px solid var(--border-light) !important;
}
@media (prefers-color-scheme: dark) {
    #chat-canvas .user .message {
        background: var(--bg-dark) !important;
        color: var(--text-dark) !important;
        border-color: var(--border-dark) !important;
    }
    #chat-canvas .bot .message {
        background: var(--assistant-bg-dark) !important;
        color: var(--text-dark) !important;
        border-color: var(--border-dark) !important;
    }
}
#prompt-bar {
    border-top: 1px solid var(--border-light);
    background: var(--bg-light);
    padding: 1.5rem 0;
    display: flex;
    gap: 0.75rem;
    align-items: flex-end;
}
@media (prefers-color-scheme: dark) {
    #prompt-bar {
        border-top: 1px solid var(--border-dark);
        background: var(--bg-dark);
    }
}
#prompt-input {
    background-color: var(--input-bg-light) !important;
    border: 1px solid var(--border-light) !important;
    border-radius: 18px !important;
    box-shadow: var(--shadow) !important;
    color: var(--text-light) !important;
    padding: 16px !important;
}
#prompt-bar .gr-textbox {
    flex: 1;
}
#prompt-bar .gr-button {
    min-width: 110px !important;
}
@media (prefers-color-scheme: dark) {
    #prompt-input {
        background-color: var(--input-bg-dark) !important;
        border-color: var(--border-dark) !important;
        box-shadow: var(--shadow-dark) !important;
        color: var(--text-dark) !important;
    }
}
#header {
    display: flex;
    justify-content: center;
    align-items: center;
    padding-bottom: 1rem;
    width: 100%;
}
#header h1 {
    font-size: 1.5rem;
    font-weight: 600;
    margin-right: 20px;
}
#model-selector {
    min-width: 350px !important;
}
#send-btn, #attach-btn {
    border-radius: 12px !important;
    padding: 14px !important;
    min-width: 50px !important;
}
#welcome-message {
    text-align: center;
    padding: 2rem;
    display: flex;
    flex-direction: column;
    justify-content: center;
    gap: 0.5rem;
}
#welcome-message h2 { font-size: 2.2rem; font-weight: 700; }
#welcome-message p { font-size: 1.1rem; opacity: 0.7; }
"""
def build_ui() -> gr.Blocks:
    """
    Build the minimalist chat interface inspired by ChatGPT.
    """
    with gr.Blocks(
        title="Llama Dev Suite",
        css=APP_CSS,
        theme=gr.themes.Default(primary_hue="orange", secondary_hue="blue"),
    ) as demo:
        app_state = gr.State(value=_init_state())

        # Header with model selection
        with gr.Row(elem_id="header"):
            gr.HTML("<h1>Llama Dev Suite</h1>")
            model_dd = gr.Dropdown(
                choices=list_models(),
                value="hf.co/mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated-GGUF",
                label="Seleccionar Modelo",
                show_label=False,
                elem_id="model-selector",
                interactive=True,
            )
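            # The default value points at a GGUF hosted on the Hugging Face Hub;
            # Ollama can pull such models directly by their hf.co/ reference, so
            # the model is assumed to have been fetched beforehand, e.g.:
            #   ollama pull hf.co/mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated-GGUF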
        # Welcome message or chat
        with gr.Column(elem_id="chat-area"):
            gr.HTML(
                """
                <section class="welcome-card" id="welcome-card">
                    <h2>Tu Asistente de IA Personal</h2>
                    <p>Comienza una conversación para obtener ayuda con código, ideas y más.</p>
                </section>
                """,
                elem_id="welcome-message",
            )
            chat_box = gr.Chatbot(
                label="",
                height=None,
                elem_id="chat-canvas",
                show_label=False,
                container=False,
                visible=True,
            )
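            # Assumption: main_chat keeps the history in the Chatbot's default
            # pair format (a list of [user, assistant] entries). Recent Gradio
            # releases prefer type="messages" (OpenAI-style dicts); if this
            # component logs a deprecation warning, switch it and
            # ui_logic.main_chat to that format together.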
with gr.Row(elem_id="prompt-bar"):
attach_btn = gr.Button("📎", elem_id="attach-btn", size="sm")
user_in = gr.Textbox(
placeholder="Escribe tu mensaje...",
lines=1,
elem_id="prompt-input",
show_label=False,
container=False,
)
send_btn = gr.Button("Enviar", elem_id="send-btn", variant="primary", size="sm")
# Hidden components for logic
files_in = gr.File(
label="Attach files",
file_count="multiple",
visible=False,
elem_id="file-input",
)
        # --- Event Handlers ---
        chat_inputs = [
            app_state,
            chat_box,
            user_in,
            model_dd,
            files_in,
        ]
        chat_outputs = [
            app_state,
            chat_box,
            user_in,
            files_in,
        ]
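        # Gradio maps a handler's return values positionally onto its outputs,
        # so ui_logic.main_chat is expected to return, in order: updated state,
        # updated chat history, a (typically cleared) textbox value, and a
        # (typically cleared) file-input value.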
        send_btn.click(main_chat, inputs=chat_inputs, outputs=chat_outputs)
        user_in.submit(main_chat, inputs=chat_inputs, outputs=chat_outputs)
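        # attach_btn is rendered but not wired to anything in this file, and
        # files_in stays hidden. A minimal hookup (an assumption, not part of
        # the original logic) could simply reveal the hidden file picker:
        #
        #     attach_btn.click(
        #         lambda: gr.update(visible=True),
        #         outputs=[files_in],
        #     )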
        # --- Helper JS to hide welcome message on first interaction ---
        gr.HTML(
            """
            <script>
            (function attachWelcomeListeners() {
                const inputContainer = document.querySelector('#prompt-input textarea');
                const welcomeCard = document.getElementById('welcome-card');
                if (!inputContainer || !welcomeCard) {
                    requestAnimationFrame(attachWelcomeListeners);
                    return;
                }
                function handleFirstInteraction() {
                    if (welcomeCard.style.display !== 'none') {
                        welcomeCard.style.display = 'none';
                    }
                }
                inputContainer.addEventListener('focus', handleFirstInteraction, { once: true });
                inputContainer.addEventListener('input', (event) => {
                    if (event.target.value.length > 0) {
                        handleFirstInteraction();
                    }
                });
            })();
            </script>
            """
        )
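        # NOTE: some Gradio versions sanitize <script> tags out of gr.HTML, in
        # which case the snippet above never runs. If the welcome card does not
        # hide, passing the same script through gr.Blocks(head=...) (available
        # in recent Gradio releases) is a more reliable place for it.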
        # Verify Ollama status on load and re-check every 60 seconds
        demo.load(verify, outputs=[], every=60)

    return demo
if __name__ == "__main__":
    demo = build_ui()

    print("🚀 Starting Llama Dev Suite...")
    print(f"📡 Ollama URL: {settings.OLLAMA_URL}")
    print(f"🌐 Server: http://{settings.HOST}:{settings.PORT}")

    # Try to start Ollama when the app boots
    start_ollama()

    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
    )
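# ---------------------------------------------------------------------------
# Assumptions worth noting (not verifiable from this file alone):
#
# * config.settings is expected to expose at least OLLAMA_URL, HOST and PORT,
#   matching the f-strings above. A hypothetical minimal config.py could look
#   like:
#
#       from pydantic_settings import BaseSettings
#
#       class Settings(BaseSettings):
#           OLLAMA_URL: str = "http://localhost:11434"  # Ollama's default port
#           HOST: str = "0.0.0.0"
#           PORT: int = 7860
#
#       settings = Settings()
#
# * demo.launch() hard-codes 0.0.0.0:7860 (what a Hugging Face Space expects),
#   so the banner printed from settings.HOST/settings.PORT only matches when
#   the config uses those same values; pass server_name=settings.HOST and
#   server_port=settings.PORT if they should stay in sync.
# ---------------------------------------------------------------------------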