import os
import base64
import requests
import traceback
import gradio as gr
# -----------------------------
# GitHub Configuration
# -----------------------------
GITHUB_OWNER = "Tony14100"
GITHUB_REPO = "Prompt-Tonic"
GITHUB_REF = "main"
# Paths to files in the repository
BOT_FILE_PATH = "OzonConsultantbot.py"
PROMPT_FILE_PATH = "OzonConsultant"
# Local cache files
BOT_CACHE_FILE = "bot_cache.py"
BOT_CACHE_SHA_FILE = "bot_cache_sha.txt"
PROMPT_CACHE_FILE = "prompt_cache.txt"
PROMPT_CACHE_SHA_FILE = "prompt_cache_sha.txt"
# -----------------------------
# Function for loading files from GitHub
# -----------------------------
def load_github_file(path_in_repo, local_cache, sha_file):
    """Fetch a file via the GitHub Contents API, with a SHA-based local cache."""
    github_token = os.getenv("GITHUB_TOKEN")
    fallback_content = ""  # empty fallback; a default value can be set here

    if not github_token:
        print(f"⚠️ GITHUB_TOKEN not found. Using fallback for {path_in_repo}")
        return fallback_content

    github_api_url = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/contents/{path_in_repo}?ref={GITHUB_REF}"
    try:
        headers = {"Authorization": f"token {github_token}", "User-Agent": "Gradio-App"}
        response = requests.get(github_api_url, headers=headers, timeout=10)
        response.raise_for_status()
        data = response.json()
        current_sha = data["sha"]

        # Reuse the local cache if the file's SHA has not changed
        cached_sha = None
        if os.path.exists(sha_file):
            with open(sha_file, "r") as f:
                cached_sha = f.read().strip()
        if cached_sha == current_sha and os.path.exists(local_cache):
            with open(local_cache, "r", encoding="utf-8") as f:
                return f.read()

        # Decode the base64 payload and refresh the local cache
        content_b64 = data["content"]
        content = base64.b64decode(content_b64).decode("utf-8")
        with open(local_cache, "w", encoding="utf-8") as f:
            f.write(content)
        with open(sha_file, "w") as f:
            f.write(current_sha)
        return content
    except Exception as e:
        print(f"❌ Failed to load {path_in_repo}: {e}")
        traceback.print_exc()
        return fallback_content
# -----------------------------
# Load the prompt and the bot code
# -----------------------------
SYSTEM_PROMPT = load_github_file(PROMPT_FILE_PATH, PROMPT_CACHE_FILE, PROMPT_CACHE_SHA_FILE)
bot_code = load_github_file(BOT_FILE_PATH, BOT_CACHE_FILE, BOT_CACHE_SHA_FILE)
# -----------------------------
# Execute the bot code
# -----------------------------
if bot_code.strip():
    try:
        exec(bot_code, globals())
        print("✅ Bot code loaded and executed successfully")
    except Exception as e:
        print(f"❌ Error while executing bot code: {e}")
        traceback.print_exc()
else:
    print("⚠️ Bot code is empty, using the fallback mock bot")

    def start_bot():
        """Minimal Gradio UI used when the real bot code is unavailable."""
        def mock_predict(msg):
            return f"[Mock reply] {msg}"

        with gr.Blocks() as app:
            txt = gr.Textbox(label="Enter a message")
            out = gr.Textbox(label="Bot reply")
            btn = gr.Button("Send")
            btn.click(mock_predict, inputs=txt, outputs=out)
        return app
# -----------------------------
# Launch the application
# -----------------------------
if "start_bot" in globals():
app = start_bot()
if __name__ == "__main__":
app.queue(max_size=5)
app.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False) |