# ling-series-spaces / tab_chat.py
import gradio as gr
import uuid
from datetime import datetime
import pandas as pd
from model_handler import ModelHandler
from config import CHAT_MODEL_SPECS, LING_1T
from recommand_config import RECOMMENDED_INPUTS
from ui_components.model_selector import create_model_selector
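# Chat tab for the Ling demo Space: builds the conversation UI and wires
# streaming chat, recommended prompts, and a browser-persisted conversation history.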
def create_chat_tab():
    model_handler = ModelHandler()

    # Conversation history and the active conversation id are persisted
    # client-side with gr.BrowserState, so they survive page reloads.
    conversation_store = gr.BrowserState(default_value=[], storage_key="ling_conversation_history")
    current_conversation_id = gr.BrowserState(default_value=None, storage_key="ling_current_conversation_id")
    def get_history_df(history):
        if not history:
            return pd.DataFrame({'ID': [], 'Conversation': []})
        df = pd.DataFrame(history)
        return df[['id', 'title']].rename(columns={'id': 'ID', 'title': 'Conversation'})
    def handle_new_chat(history):
        conv_id = str(uuid.uuid4())
        new_convo = {
            "id": conv_id, "title": "New Conversation",
            "messages": [], "timestamp": datetime.now().isoformat()
        }
        updated_history = [new_convo] + (history or [])
        return (
            conv_id,
            updated_history,
            [],
            gr.update(value=get_history_df(updated_history))
        )
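    # Selecting a row in the history table loads that conversation's messages
    # back into the chatbot.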
    def load_conversation_from_df(df: pd.DataFrame, evt: gr.SelectData, history):
        if evt.index is None:
            return None, []
        selected_id = df.iloc[evt.index[0]]['ID']
        for convo in history:
            if convo["id"] == selected_id:
                return selected_id, convo["messages"]
        # Fall back to a fresh conversation if the selected row is not in the store.
        new_id, _history, new_messages, _df_update = handle_new_chat(history)
        return new_id, new_messages
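    # --- UI Layout --- #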
    with gr.Row(equal_height=False, elem_id="indicator-chat-tab"):
        with gr.Column(scale=1):
            new_chat_btn = gr.Button("➕ 新对话")
            # The ID column is kept for row lookups but effectively hidden (0% width).
            history_df = gr.DataFrame(
                value=get_history_df(conversation_store.value),
                headers=["ID", "对话记录"],
                datatype=["str", "str"],
                interactive=False,
                visible=True,
                column_widths=["0%", "99%"]
            )
        with gr.Column(scale=4):
            chatbot = gr.Chatbot(height=500, type='messages')
            with gr.Row():
                textbox = gr.Textbox(placeholder="输入消息...", container=False, scale=7)
                submit_btn = gr.Button("发送", scale=1)
            gr.Markdown("### 推荐对话")
            recommended_dataset = gr.Dataset(
                components=[gr.Textbox(visible=False)],
                samples=[[item["task"]] for item in RECOMMENDED_INPUTS],
                label="推荐场景", headers=["选择一个场景试试"],
            )
        with gr.Column(scale=1):
            model_dropdown, model_description_markdown = create_model_selector(
                model_specs=CHAT_MODEL_SPECS,
                default_model_constant=LING_1T
            )
            system_prompt_textbox = gr.Textbox(label="系统提示词", lines=5, placeholder="输入系统提示词...")
            temperature_slider = gr.Slider(minimum=0, maximum=1.0, value=0.7, step=0.1, label="温度参数")
    # --- Event Handlers --- #
    # The change handler is now encapsulated within create_model_selector.
    def on_select_recommendation(evt: gr.SelectData, history):
        # Look up the clicked sample, then start a new conversation pre-filled
        # with the recommended model, system prompt, temperature and message.
        selected_task = evt.value[0]
        item = next((i for i in RECOMMENDED_INPUTS if i["task"] == selected_task), None)
        if not item:
            # No matching recommendation: leave all eight outputs unchanged.
            return (gr.update(), gr.update(), gr.update(), gr.update(),
                    gr.update(), gr.update(), gr.update(), gr.update())
        new_id, new_history, new_messages, history_df_update = handle_new_chat(history)
        return (
            new_id, new_history,
            gr.update(value=item["model"]),
            gr.update(value=item["system_prompt"]),
            gr.update(value=item["temperature"]),
            gr.update(value=item["user_message"]),
            history_df_update,
            new_messages
        )
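    # The return order above must match the `outputs` list of the .select() call below.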
    recommended_dataset.select(
        on_select_recommendation,
        inputs=[conversation_store],
        outputs=[current_conversation_id, conversation_store, model_dropdown, system_prompt_textbox,
                 temperature_slider, textbox, history_df, chatbot],
        show_progress="none"
    )
    def chat_stream(conv_id, history, model_display_name, message, chat_history, system_prompt, temperature):
        if not message:
            yield chat_history
            return
        # Map the display name back to its model constant, defaulting to LING_1T.
        model_constant = next((k for k, v in CHAT_MODEL_SPECS.items() if v["display_name"] == model_display_name), LING_1T)
        # Stream incremental chat-history updates from the model handler.
        response_generator = model_handler.get_response(model_constant, message, chat_history, system_prompt, temperature)
        for history_update in response_generator:
            yield history_update
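    # Note: the chatbot uses type='messages', so each yielded history is assumed to be a
    # list of {"role": ..., "content": ...} dicts produced by model_handler.get_response.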
    def on_chat_stream_complete(conv_id, history, final_chat_history):
        current_convo = next((c for c in history if c["id"] == conv_id), None)
        if not current_convo:
            return history, gr.update()
        # Title an untitled conversation with the first 50 characters of the latest user message.
        if len(final_chat_history) > len(current_convo["messages"]) and current_convo["title"] == "New Conversation":
            user_message = final_chat_history[-2]["content"] if len(final_chat_history) > 1 else final_chat_history[0]["content"]
            current_convo["title"] = user_message[:50]
        current_convo["messages"] = final_chat_history
        current_convo["timestamp"] = datetime.now().isoformat()
        # Re-sort the store so the most recently updated conversation is listed first.
        history = sorted([c for c in history if c["id"] != conv_id] + [current_convo], key=lambda x: x["timestamp"], reverse=True)
        return history, gr.update(value=get_history_df(history))
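    # Both the send button and pressing Enter in the textbox run the same two-step pipeline:
    # stream the reply into the chatbot, then persist the finished exchange to the browser
    # store and refresh the history table.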
    submit_btn.click(
        chat_stream,
        [current_conversation_id, conversation_store, model_dropdown, textbox, chatbot, system_prompt_textbox, temperature_slider],
        [chatbot]
    ).then(
        on_chat_stream_complete,
        [current_conversation_id, conversation_store, chatbot],
        [conversation_store, history_df]
    )
    textbox.submit(
        chat_stream,
        [current_conversation_id, conversation_store, model_dropdown, textbox, chatbot, system_prompt_textbox, temperature_slider],
        [chatbot]
    ).then(
        on_chat_stream_complete,
        [current_conversation_id, conversation_store, chatbot],
        [conversation_store, history_df]
    )
    new_chat_btn.click(handle_new_chat, inputs=[conversation_store], outputs=[current_conversation_id, conversation_store, chatbot, history_df])
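    # Row selection only switches the active conversation id and chatbot contents;
    # the stored history itself is only rewritten by on_chat_stream_complete.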
    history_df.select(load_conversation_from_df, inputs=[history_df, conversation_store], outputs=[current_conversation_id, chatbot])

    return conversation_store, current_conversation_id, history_df, chatbot