teslatony committed on
Commit
805b131
·
verified ·
1 Parent(s): 8d3f37e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +96 -43
app.py CHANGED
@@ -1,86 +1,105 @@
1
  import os
2
  import traceback
3
- import base64
4
  from typing import Dict
5
  import gradio as gr
6
  from huggingface_hub import InferenceClient
7
  import requests
 
8
 
9
  # -----------------------------
10
- # Настройки GitHub
11
  # -----------------------------
12
  GITHUB_OWNER = "Tony14100"
13
  GITHUB_REPO = "Prompt-Tonic"
14
  GITHUB_REF = "main"
15
  GITHUB_PATH = "OzonConsultant"
 
16
 
17
  LOCAL_PROMPT_FILE = "prompt_cache.txt"
18
  LOCAL_SHA_FILE = "prompt_cache_sha.txt"
19
 
 
 
 
20
  def load_system_prompt():
21
  github_token = os.getenv("GITHUB_TOKEN")
22
  default_prompt = "Вы — полезный консультант по Ozon. Отвечайте на вопросы клиентов четко и профессионально."
23
  if not github_token:
24
  return default_prompt
 
25
  headers = {
26
  'Authorization': f'token {github_token}',
 
27
  'Accept': 'application/vnd.github.v3+json'
28
  }
 
29
  try:
30
- url = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/contents/{GITHUB_PATH}?ref={GITHUB_REF}"
31
- response = requests.get(url, headers=headers, timeout=10)
32
  response.raise_for_status()
33
  data = response.json()
34
- content_b64 = data.get("content")
35
- current_sha = data.get("sha")
 
 
 
36
  if os.path.exists(LOCAL_SHA_FILE):
37
  with open(LOCAL_SHA_FILE, 'r') as f:
38
  cached_sha = f.read().strip()
39
- if cached_sha == current_sha and os.path.exists(LOCAL_PROMPT_FILE):
40
- with open(LOCAL_PROMPT_FILE, 'r', encoding='utf-8') as f:
41
- return f.read().strip() or default_prompt
 
 
 
 
 
 
 
 
42
  prompt = base64.b64decode(content_b64).decode('utf-8').strip()
 
 
 
43
  with open(LOCAL_PROMPT_FILE, 'w', encoding='utf-8') as f:
44
  f.write(prompt)
45
  with open(LOCAL_SHA_FILE, 'w') as f:
46
  f.write(current_sha)
47
- return prompt or default_prompt
48
- except Exception as e:
49
- print(f"[WARNING] GitHub fallback: {e}")
50
- if os.path.exists("OzonConsultant.txt"):
51
- try:
52
- with open("OzonConsultant.txt", 'r', encoding='utf-8') as f:
53
- return f.read().strip() or default_prompt
54
- except: pass
55
  return default_prompt
56
 
57
  SYSTEM_PROMPT = load_system_prompt()
 
58
  MODEL_ID = "zai-org/GLM-4.5"
59
  MAX_TOKENS = 1024
60
 
61
  # -----------------------------
62
- # Клиент HF
63
  # -----------------------------
64
  def get_client() -> InferenceClient:
65
  token = os.getenv("HF_TOKEN") or os.getenv("HF_API_KEY")
66
  if not token:
67
- raise RuntimeError("HF_TOKEN не найден")
68
  return InferenceClient(token=token)
69
 
70
  def reset_state():
71
  return {"messages": [], "system_used": False}
72
 
73
  def mock_predict(user_message: str):
74
- return f"[Mock reply] {user_message}"
75
 
76
  def on_user_message(user_message: str, state: Dict):
77
  if not user_message.strip():
78
  return [], state, gr.update(value="")
 
79
  messages = state["messages"]
80
  if not state["system_used"]:
81
- messages.insert(0, {"role":"system","content":SYSTEM_PROMPT})
82
- state["system_used"]=True
83
- messages.append({"role":"user","content":user_message})
 
 
84
  try:
85
  client = get_client()
86
  response = client.chat_completion(
@@ -91,38 +110,72 @@ def on_user_message(user_message: str, state: Dict):
91
  stream=False,
92
  )
93
  assistant_reply = response.choices[0].message["content"].strip()
94
- except Exception as e:
95
- print(f"[DEBUG] Ошибка модели: {e}")
96
  assistant_reply = mock_predict(user_message)
97
- messages.append({"role":"assistant","content":assistant_reply})
98
- state["messages"]=messages
99
- chat_history = [{"role":m["role"],"content":m["content"]} for m in messages if m["role"]!="system"]
 
 
 
 
 
 
100
  return chat_history, state, gr.update(value="")
101
 
102
  # -----------------------------
103
  # UI
104
  # -----------------------------
105
  def build_ui():
106
- with gr.Blocks(css="""
107
- .chat-container {width:60%; float:left;}
108
- .banner-container {width:35%; float:right; position:sticky; top:0;}
109
- .chatbot .message {white-space: pre-wrap;}
110
- """) as app:
 
 
 
 
 
 
 
 
 
 
111
  with gr.Row():
112
- with gr.Column(scale=3, elem_classes="chat-container"):
113
- chat = gr.Chatbot(height=600)
114
- input_box = gr.Textbox(placeholder="Введите сообщение…")
115
  state = gr.State(reset_state())
116
- input_box.submit(on_user_message, inputs=[input_box,state], outputs=[chat,state,input_box])
 
 
 
 
117
  clear_btn = gr.Button("Очистить чат")
118
- clear_btn.click(lambda: ([], reset_state(), gr.update(value="")), None, [chat,state,input_box])
119
- with gr.Column(scale=2, elem_classes="banner-container"):
120
- gr.Markdown("### 📢 Рекламный баннер")
121
- gr.HTML('<a href="https://example.com" target="_blank"><img src="https://via.placeholder.com/300x600" style="width:100%;"></a>')
 
 
 
 
 
 
 
 
122
  return app
123
 
124
  app = build_ui()
125
 
126
- if __name__=="__main__":
 
 
 
 
 
127
  app.queue(max_size=5)
128
- app.launch(server_name="0.0.0.0", server_port=7860, debug=True)
 
 
 
1
  import os
2
  import traceback
 
3
  from typing import Dict
4
  import gradio as gr
5
  from huggingface_hub import InferenceClient
6
  import requests
7
+ import base64
8
 
9
# -----------------------------
# GitHub configuration
# -----------------------------
# Repository coordinates of the file that holds the system prompt.
GITHUB_OWNER = "Tony14100"
GITHUB_REPO = "Prompt-Tonic"
GITHUB_REF = "main"
GITHUB_PATH = "OzonConsultant"
# Contents-API endpoint, resolved once at import time.
GITHUB_API_URL = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/contents/{GITHUB_PATH}?ref={GITHUB_REF}"

# Local cache: the prompt text and the GitHub blob SHA it was fetched at.
LOCAL_PROMPT_FILE = "prompt_cache.txt"
LOCAL_SHA_FILE = "prompt_cache_sha.txt"
20
 
21
# -----------------------------
# System prompt loading
# -----------------------------
def load_system_prompt():
    """Fetch the system prompt from GitHub, with a local SHA-based cache.

    Resolution order:
      1. Without a ``GITHUB_TOKEN`` env var, return the built-in default.
      2. Query the GitHub contents API; when the remote blob SHA matches the
         locally cached SHA, reuse the cached prompt file.
      3. Otherwise decode the base64 payload, refresh both cache files, and
         return the fresh prompt.

    Returns:
        str: the prompt text, or the default prompt on any failure.
    """
    github_token = os.getenv("GITHUB_TOKEN")
    default_prompt = "Вы — полезный консультант по Ozon. Отвечайте на вопросы клиентов четко и профессионально."
    if not github_token:
        return default_prompt

    headers = {
        'Authorization': f'token {github_token}',
        'User-Agent': 'Gradio-App-Request',
        'Accept': 'application/vnd.github.v3+json'
    }

    try:
        response = requests.get(GITHUB_API_URL, headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json()

        current_sha = data.get('sha')
        if not current_sha:
            return default_prompt

        # Serve from the local cache when the remote SHA is unchanged.
        cached_sha = None
        if os.path.exists(LOCAL_SHA_FILE):
            with open(LOCAL_SHA_FILE, 'r') as f:
                cached_sha = f.read().strip()

        if cached_sha == current_sha and os.path.exists(LOCAL_PROMPT_FILE):
            with open(LOCAL_PROMPT_FILE, 'r', encoding='utf-8') as f:
                prompt = f.read().strip()
            if prompt:
                return prompt

        content_b64 = data.get('content')
        if not content_b64:
            return default_prompt

        prompt = base64.b64decode(content_b64).decode('utf-8').strip()
        if not prompt:
            return default_prompt

        # Refresh the cache: prompt text first, then its SHA marker.
        with open(LOCAL_PROMPT_FILE, 'w', encoding='utf-8') as f:
            f.write(prompt)
        with open(LOCAL_SHA_FILE, 'w') as f:
            f.write(current_sha)

        return prompt
    except Exception as e:
        # BUG FIX: failures used to be swallowed silently; log them so
        # operators can see why the default prompt is in use.
        print(f"[WARNING] load_system_prompt fallback to default: {e}")
        return default_prompt
71
 
72
# Resolved once at import time; injected as the first message of each chat.
SYSTEM_PROMPT = load_system_prompt()

MODEL_ID = "zai-org/GLM-4.5"  # HF Inference model used for chat completions
MAX_TOKENS = 1024  # cap on generated tokens per reply
76
 
77
# -----------------------------
# Helper functions
# -----------------------------
def get_client() -> InferenceClient:
    """Build an InferenceClient from the HF_TOKEN (or HF_API_KEY) env var.

    Raises:
        RuntimeError: when neither environment variable is set.
    """
    token = os.getenv("HF_TOKEN") or os.getenv("HF_API_KEY")
    if token:
        return InferenceClient(token=token)
    raise RuntimeError("HF_TOKEN не найден. Добавь его в Secrets.")
85
 
86
def reset_state():
    """Create a pristine conversation state.

    Returns a dict with an empty message list and a flag recording that the
    system prompt has not yet been inserted into the conversation.
    """
    return dict(messages=[], system_used=False)
88
 
89
def mock_predict(user_message: str):
    """Offline fallback: echo the user's text inside a canned mock reply."""
    return "[Mock reply] Вы написали: {}".format(user_message)
91
 
92
  def on_user_message(user_message: str, state: Dict):
93
  if not user_message.strip():
94
  return [], state, gr.update(value="")
95
+
96
  messages = state["messages"]
97
  if not state["system_used"]:
98
+ messages.insert(0, {"role": "system", "content": SYSTEM_PROMPT})
99
+ state["system_used"] = True
100
+
101
+ messages.append({"role": "user", "content": user_message})
102
+ assistant_reply = ""
103
  try:
104
  client = get_client()
105
  response = client.chat_completion(
 
110
  stream=False,
111
  )
112
  assistant_reply = response.choices[0].message["content"].strip()
113
+ except Exception:
 
114
  assistant_reply = mock_predict(user_message)
115
+
116
+ messages.append({"role": "assistant", "content": assistant_reply})
117
+ state["messages"] = messages
118
+
119
+ chat_history = []
120
+ for msg in messages:
121
+ if msg["role"] != "system" and msg["content"].strip():
122
+ chat_history.append({"role": msg["role"], "content": msg["content"]})
123
+
124
  return chat_history, state, gr.update(value="")
125
 
126
# -----------------------------
# UI
# -----------------------------
def build_ui():
    """Assemble the Gradio Blocks app: logo, chat column, fixed ad column.

    Returns:
        gr.Blocks: the constructed (but not yet launched) application.
    """
    LOGO_URL = "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcS5Z1B-LcytPRiWBQGi6OLAdz5UIfijjAgrNg&s"
    css = """
    .chat-column { width: 400px; float: left; }
    .ad-column { position: fixed; right: 0; top: 0; width: 200px; height: 100vh; background-color: #f0f0f0; padding: 10px; overflow: auto; }
    .gr-chat-message { white-space: pre-wrap; }
    """
    with gr.Blocks(css=css) as app:
        # Round logo shown above the chat.
        gr.HTML(f"""
        <div style="text-align:center; margin-bottom:10px;">
            <img src="{LOGO_URL}" width="100" height="100" style="border-radius:50%;">
        </div>
        """)
        gr.Markdown("**Спроси меня**")

        with gr.Row():
            with gr.Column(elem_classes="chat-column"):
                # BUG FIX: on_user_message returns a list of
                # {"role": ..., "content": ...} dicts, which requires the
                # Chatbot "messages" format; the default (tuple) format
                # renders such payloads incorrectly.
                chat = gr.Chatbot(height=550, type="messages")
                input_box = gr.Textbox(placeholder="Введите сообщение…", label="Сообщение")
                state = gr.State(reset_state())
                input_box.submit(
                    on_user_message,
                    inputs=[input_box, state],
                    outputs=[chat, state, input_box]
                )
                clear_btn = gr.Button("Очистить чат")
                clear_btn.click(
                    lambda: ([], reset_state(), gr.update(value="")),
                    None,
                    [chat, state, input_box]
                )

            with gr.Column(elem_classes="ad-column"):
                # Static advertising banner pinned to the right edge.
                gr.HTML("""
                <h4>Реклама</h4>
                <a href="https://example.com" target="_blank">Перейти на баннер</a>
                <p>Баннер всегда виден при скролле.</p>
                """)
    return app
169
 
170
# Built at import time so the hosting platform (e.g. HF Spaces) can serve
# the `app` object directly without running the __main__ guard.
app = build_ui()

if __name__ == "__main__":
    # Fail-soft startup check: warn when HF_TOKEN is missing rather than
    # erroring only on the first chat request.
    try:
        get_client()
    except Exception as e:
        print(f"[WARNING] HF_TOKEN: {e}")

    app.queue(max_size=5)  # limit concurrently queued requests
    app.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False)