import os
import base64
import requests
from typing import Dict
import gradio as gr
from huggingface_hub import InferenceClient
# -----------------------------
# GitHub Prompt Configuration
# -----------------------------
GITHUB_OWNER = "Tony14100"
GITHUB_REPO = "Prompt-Tonic"
GITHUB_REF = "main"
GITHUB_PATH = "OzonConsultant"
GITHUB_API_URL = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/contents/{GITHUB_PATH}?ref={GITHUB_REF}"
LOCAL_PROMPT_FILE = "prompt_cache.txt"
LOCAL_SHA_FILE = "prompt_cache_sha.txt"
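# The GitHub Contents API returns the file body base64-encoded together with its blob SHA;
# the SHA is cached locally and used to skip re-downloading an unchanged prompt.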
# -----------------------------
# Load system prompt from GitHub
# -----------------------------
DEFAULT_PROMPT = "Вы — полезный консультант по Ozon. Отвечайте четко и профессионально."

def load_system_prompt():
    github_token = os.getenv("GITHUB_TOKEN")
    if not github_token:
        return DEFAULT_PROMPT
    try:
        headers = {'Authorization': f'token {github_token}', 'User-Agent': 'Gradio-App'}
        response = requests.get(GITHUB_API_URL, headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json()
        current_sha = data['sha']

        # Reuse the cached copy if the prompt has not changed on GitHub.
        cached_sha = None
        if os.path.exists(LOCAL_SHA_FILE):
            with open(LOCAL_SHA_FILE, 'r') as f:
                cached_sha = f.read().strip()
        if cached_sha == current_sha and os.path.exists(LOCAL_PROMPT_FILE):
            with open(LOCAL_PROMPT_FILE, 'r', encoding='utf-8') as f:
                return f.read().strip()

        # Decode the base64 payload and refresh the local cache.
        content_b64 = data['content']
        prompt = base64.b64decode(content_b64).decode('utf-8').strip()
        with open(LOCAL_PROMPT_FILE, 'w', encoding='utf-8') as f:
            f.write(prompt)
        with open(LOCAL_SHA_FILE, 'w') as f:
            f.write(current_sha)
        return prompt
    except Exception:
        # Network or parsing errors fall back to the built-in default prompt.
        return DEFAULT_PROMPT
SYSTEM_PROMPT = load_system_prompt()
# -----------------------------
# HF Client
# -----------------------------
MODEL_ID = "zai-org/GLM-4.5"
MAX_TOKENS = 1024
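# The token is read from HF_TOKEN or HF_API_KEY; on Hugging Face Spaces it would
# typically be configured as a repository secret. No model is pinned on the client,
# so the model is passed per request in chat_completion().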
def get_client() -> InferenceClient:
    token = os.getenv("HF_TOKEN") or os.getenv("HF_API_KEY")
    if not token:
        raise RuntimeError("HF_TOKEN не найден!")
    return InferenceClient(token=token)
# -----------------------------
# State management
# -----------------------------
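# Per-session state kept in gr.State:
#   {"messages": [OpenAI-style role/content dicts], "system_used": bool}
# "system_used" ensures the system prompt is inserted only once per conversation.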
def reset_state():
    return {"messages": [], "system_used": False}

def mock_predict(user_message: str):
    # Offline fallback used when the Inference API call fails.
    return f"[Mock reply] Ты написал: {user_message}"
def on_user_message(user_message: str, state: Dict):
    if not user_message.strip():
        # Ignore empty input without clearing the visible chat history.
        return gr.update(), state, gr.update(value="")

    messages = state["messages"]
    # Prepend the system prompt once per conversation.
    if not state["system_used"]:
        messages.insert(0, {"role": "system", "content": SYSTEM_PROMPT})
        state["system_used"] = True
    messages.append({"role": "user", "content": user_message})

    try:
        client = get_client()
        response = client.chat_completion(
            model=MODEL_ID,
            messages=messages,
            max_tokens=MAX_TOKENS,
            temperature=0.5,
            stream=False,
        )
        assistant_reply = response.choices[0].message.content.strip()
    except Exception:
        # Fall back to a mock reply if the HF Inference API call fails.
        assistant_reply = mock_predict(user_message)

    messages.append({"role": "assistant", "content": assistant_reply})
    state["messages"] = messages

    # Render non-system messages as styled HTML for the Chatbot component.
    chat_history = []
    for msg in messages:
        if msg["role"] != "system" and msg["content"].strip():
            html = (
                f'<div style="margin:5px;">'
                f'<span style="color:#00aaff; font-family:Arial, sans-serif; font-size:14px;">'
                f'{msg["content"]}'
                f'</span></div>'
            )
            chat_history.append({"role": msg["role"], "content": html})
    return chat_history, state, gr.update(value="")
# -----------------------------
# Build UI
# -----------------------------
def build_ui():
    css = """
    body {background-color:#000000; color:#00aaff;}
    .chat-container {display:flex; flex-direction:row; gap:20px; height:600px;}
    .chat-box { flex:2; overflow-y:auto; padding:10px; border:1px solid #00aaff; border-radius:5px; background:#000; width:100%; }
    .ads-box {flex:1; display:flex; flex-direction:column; gap:10px; height:600px;}
    .ad { background-color:#111; color:#fff; flex:1; display:flex; justify-content:center; align-items:center; font-weight:bold; border:1px solid #00aaff; border-radius:5px; }
    /* Style the input box to match the chat window */
    .input-styled {
        width:100% !important;
        max-width: none !important;
    }
    .input-styled .wrap {
        width:100% !important;
    }
    .input-styled textarea {
        width:100% !important;
        background:#000000 !important;
        color:#00aaff !important;
        border:1px solid #00aaff !important;
        border-radius:5px !important;
        padding:10px !important;
        font-size:14px !important;
        font-family:Arial, sans-serif !important;
        resize: vertical;
        min-height: 40px;
    }
    .input-styled label {
        color: #00aaff !important;
        font-family: Arial, sans-serif;
        font-size: 14px;
    }
    """
    # Auto-scroll the chat box to the newest message. The observer is attached to
    # document.body after page load, because Gradio renders its components dynamically
    # and no .chat-box element exists when the script first runs.
    autoscroll_js = """
    <script>
    window.addEventListener("load", () => {
        const chatObserver = new MutationObserver(() => {
            document.querySelectorAll('.chat-box').forEach(chat => { chat.scrollTop = chat.scrollHeight; });
        });
        chatObserver.observe(document.body, {childList: true, subtree: true});
    });
    </script>
    """
    initial_bot_message = (
        "Привет! Я консультант Ozon. Могу помочь с товарами, заказами и продажами. "
        "С чем вам помочь в первую очередь?"
    )
    chat_history_initial = [
        {
            "role": "assistant",
            "content": (
                f'<div style="margin:5px;">'
                f'<span style="color:#00aaff; font-family:Arial; font-size:14px;">'
                f'{initial_bot_message}</span></div>'
            ),
        }
    ]
    # CSS and the autoscroll script are injected via Blocks itself: gr.HTML does not
    # execute <script> tags, while the `head` parameter does.
    with gr.Blocks(css=css, head=autoscroll_js) as app:
        with gr.Row(elem_classes="chat-container"):
            with gr.Column(scale=3):
                chat = gr.Chatbot(
                    value=chat_history_initial,
                    elem_classes="chat-box",
                    type="messages",  # matches the {"role": ..., "content": ...} dicts used above
                )
                input_box = gr.Textbox(
                    placeholder="Введите сообщение…",
                    label="Сообщение",
                    elem_classes="input-styled",
                    lines=1,
                )
                state = gr.State(reset_state())
                input_box.submit(
                    on_user_message,
                    inputs=[input_box, state],
                    outputs=[chat, state, input_box],
                )
                gr.Button("Очистить чат").click(
                    lambda: (chat_history_initial, reset_state(), gr.update(value="")),
                    None,
                    [chat, state, input_box],
                )
            with gr.Column(scale=1):
                with gr.Row(elem_classes="ads-box"):
                    gr.HTML('<div class="ad">Здесь может быть ваша реклама</div>')
                    gr.HTML('<div class="ad">Здесь может быть ваша реклама</div>')
    return app
app = build_ui()
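# Running `python app.py` serves the UI on port 7860 (the default port a Gradio
# Space is expected to listen on) with the request queue capped at 5 pending events.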
if __name__ == "__main__":
    app.queue(max_size=5)
    app.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False)