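"""Gradio chat app: an Ozon marketplace consultant bot.

The system prompt is fetched from a GitHub repository (with SHA-based local
caching) and replies are generated through the Hugging Face Inference API
(zai-org/GLM-4.5). If the prompt fetch or the model call fails, the app
degrades gracefully to a default prompt and a mock reply. Expects HF_TOKEN
(or HF_API_KEY) and, optionally, GITHUB_TOKEN in the environment.
"""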
import os
import base64
import requests
from typing import Dict
import gradio as gr
from huggingface_hub import InferenceClient

# -----------------------------
# GitHub Prompt Configuration
# -----------------------------
GITHUB_OWNER = "Tony14100"
GITHUB_REPO = "Prompt-Tonic"
GITHUB_REF = "main"
GITHUB_PATH = "OzonConsultant"
GITHUB_API_URL = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/contents/{GITHUB_PATH}?ref={GITHUB_REF}"
LOCAL_PROMPT_FILE = "prompt_cache.txt"
LOCAL_SHA_FILE = "prompt_cache_sha.txt"
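# The GitHub contents API returns the file's blob SHA together with its
# base64-encoded body, so a matching cached SHA lets us skip re-downloading.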

# -----------------------------
# Load system prompt from GitHub
# -----------------------------
# Fallback prompt used when GITHUB_TOKEN is missing or the fetch fails.
DEFAULT_SYSTEM_PROMPT = "Вы — полезный консультант по Ozon. Отвечайте четко и профессионально."

def load_system_prompt():
    """Fetch the system prompt from GitHub, caching it locally keyed by blob SHA."""
    github_token = os.getenv("GITHUB_TOKEN")
    if not github_token:
        return DEFAULT_SYSTEM_PROMPT
    try:
        headers = {'Authorization': f'token {github_token}', 'User-Agent': 'Gradio-App'}
        response = requests.get(GITHUB_API_URL, headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json()
        current_sha = data['sha']
        cached_sha = None
        if os.path.exists(LOCAL_SHA_FILE):
            with open(LOCAL_SHA_FILE, 'r') as f:
                cached_sha = f.read().strip()
        if cached_sha == current_sha and os.path.exists(LOCAL_PROMPT_FILE):
            with open(LOCAL_PROMPT_FILE, 'r', encoding='utf-8') as f:
                return f.read().strip()
        content_b64 = data['content']
        prompt = base64.b64decode(content_b64).decode('utf-8').strip()
        with open(LOCAL_PROMPT_FILE, 'w', encoding='utf-8') as f:
            f.write(prompt)
        with open(LOCAL_SHA_FILE, 'w') as f:
            f.write(current_sha)
        return prompt
    except (requests.RequestException, OSError, KeyError, ValueError):
        # Network, file-system, or decoding errors: fall back to the default prompt.
        return DEFAULT_SYSTEM_PROMPT

SYSTEM_PROMPT = load_system_prompt()

# -----------------------------
# HF Client
# -----------------------------
MODEL_ID = "zai-org/GLM-4.5"
MAX_TOKENS = 1024

def get_client() -> InferenceClient:
    """Build an InferenceClient from HF_TOKEN (or HF_API_KEY) in the environment."""
    token = os.getenv("HF_TOKEN") or os.getenv("HF_API_KEY")
    if not token:
        raise RuntimeError("HF_TOKEN (or HF_API_KEY) is not set")
    return InferenceClient(token=token)

# -----------------------------
# State management
# -----------------------------
def reset_state():
    """Return a fresh conversation state."""
    return {"messages": [], "system_used": False}

def mock_predict(user_message: str):
    """Offline fallback reply used when the model call fails."""
    return f"[Mock reply] Ты написал: {user_message}"

def on_user_message(user_message: str, state: Dict):
    """Handle a submitted message: query the model and return the rendered history."""
    if not user_message.strip():
        # Nothing to send: leave the chat untouched and just clear the input box.
        return gr.update(), state, gr.update(value="")
    messages = state["messages"]
    if not state["system_used"]:
        messages.insert(0, {"role": "system", "content": SYSTEM_PROMPT})
        state["system_used"] = True
    messages.append({"role": "user", "content": user_message})
    try:
        client = get_client()
        response = client.chat_completion(
            model=MODEL_ID,
            messages=messages,
            max_tokens=MAX_TOKENS,
            temperature=0.5,
            stream=False,
        )
        assistant_reply = response.choices[0].message.content.strip()
    except Exception:
        # Any model/API failure (auth, rate limit, network): fall back to the mock reply.
        assistant_reply = mock_predict(user_message)
    messages.append({"role": "assistant", "content": assistant_reply})
    state["messages"] = messages
    chat_history = []
    for msg in messages:
        if msg["role"] != "system" and msg["content"].strip():
            html = (
                f'<div style="margin:5px;">'
                f'<span style="color:#00aaff; font-family:Arial, sans-serif; font-size:14px;">'
                f'{msg["content"]}'
                f'</span></div>'
            )
            chat_history.append({"role": msg["role"], "content": html})
    return chat_history, state, gr.update(value="")

# -----------------------------
# Build UI
# -----------------------------
def build_ui():
    css = """
    body {background-color:#000000; color:#00aaff;}
    .chat-container {display:flex; flex-direction:row; gap:20px; height:600px;}
    .chat-box { flex:2; overflow-y:auto; padding:10px; border:1px solid #00aaff; border-radius:5px; background:#000; width:100%; }
    .ads-box {flex:1; display:flex; flex-direction:column; gap:10px; height:600px;}
    .ad { background-color:#111; color:#fff; flex:1; display:flex; justify-content:center; align-items:center; font-weight:bold; border:1px solid #00aaff; border-radius:5px; }
    /* Input box styled to match the chat window */
    .input-styled {
        width:100% !important;
        max-width: none !important;
    }
    .input-styled .wrap {
        width:100% !important;
    }
    .input-styled textarea {
        width:100% !important;
        background:#000000 !important;
        color:#00aaff !important;
        border:1px solid #00aaff !important;
        border-radius:5px !important;
        padding:10px !important;
        font-size:14px !important;
        font-family:Arial, sans-serif !important;
        resize: vertical;
        min-height: 40px;
    }
    .input-styled label {
        color: #00aaff !important;
        font-family: Arial, sans-serif;
        font-size: 14px;
    }
    """
    # Injected through the Blocks `head` parameter below: <script> tags placed in a
    # gr.HTML component are inserted via innerHTML and never execute.
    autoscroll_js = """
    <script>
    window.addEventListener('load', () => {
        const chatObserver = new MutationObserver(() => {
            document.querySelectorAll('.chat-box').forEach(chat => { chat.scrollTop = chat.scrollHeight; });
        });
        document.querySelectorAll('.chat-box').forEach(el => chatObserver.observe(el, {childList: true, subtree: true}));
    });
    </script>
    """
    initial_bot_message = (
        "Привет! Я консультант Ozon. Могу помочь с товарами, заказами и продажами. "
        "С чем хотите помочь первым?"
    )
    chat_history_initial = [
        {
            "role": "assistant",
            "content": (
                f'<div style="margin:5px;">'
                f'<span style="color:#00aaff; font-family:Arial; font-size:14px;">'
                f'{initial_bot_message}</span></div>'
            ),
        }
    ]
    # Pass the CSS and the autoscroll script to Blocks directly so both are applied
    # at page level (Gradio's `css` and `head` parameters).
    with gr.Blocks(css=css, head=autoscroll_js) as app:
        with gr.Row(elem_classes="chat-container"):
            with gr.Column(scale=3):
                # type="messages" lets the chatbot consume {"role": ..., "content": ...} dicts.
                chat = gr.Chatbot(value=chat_history_initial, elem_classes="chat-box", type="messages")
                input_box = gr.Textbox(
                    placeholder="Введите сообщение…",
                    label="Сообщение",
                    elem_classes="input-styled",
                    lines=1
                )
                state = gr.State(reset_state())
                input_box.submit(
                    on_user_message,
                    inputs=[input_box, state],
                    outputs=[chat, state, input_box]
                )
                gr.Button("Очистить чат").click(
                    lambda: (chat_history_initial, reset_state(), gr.update(value="")),
                    None,
                    [chat, state, input_box]
                )
            with gr.Column(scale=1):
                with gr.Row(elem_classes="ads-box"):
                    gr.HTML('<div class="ad">Здесь может быть ваша реклама</div>')
                    gr.HTML('<div class="ad">Здесь может быть ваша реклама</div>')
    return app

app = build_ui()
if __name__ == "__main__":
    app.queue(max_size=5)
    app.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False)
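
# Example local run (filename `app.py` assumed; adjust to the actual module name):
#   export HF_TOKEN=hf_xxx        # required for real model replies
#   export GITHUB_TOKEN=ghp_xxx   # optional: enables the GitHub-hosted prompt
#   python app.py                 # UI served at http://localhost:7860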