Update app.py
app.py CHANGED
@@ -142,12 +142,10 @@ def predict(_chatbot, task_history) -> list:
         query = [{'image': chat_query[0]}]
     else:
         query = [{'text': _parse_text(chat_query)}]
-
     print("Query for model:", query)
     inputs = tokenizer.from_list_format(query)
     tokenized_inputs = tokenizer(inputs, return_tensors='pt')
     tokenized_inputs = tokenized_inputs.to(model.device)
-
     pred = model.generate(**tokenized_inputs)
     response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
     print("Model response:", response)
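This hunk only removes two blank lines, but it shows the Qwen-VL generation path: a list-format query is serialized by the tokenizer, tokenized, moved to the model's device, passed to `generate`, and decoded. A minimal, self-contained sketch of that path; the checkpoint name, placeholder image path, and absence of generation arguments are assumptions, not taken from this commit:

```python
# Minimal sketch of the generation path in this hunk (not the full app.py).
# The checkpoint name, the placeholder image path, and the default generation
# settings are assumptions; only the tokenizer/model calls mirror the diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-VL-Chat", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen-VL-Chat", trust_remote_code=True
).eval()

# Qwen-VL's tokenizer turns a list of {'image': ...}/{'text': ...} items into one prompt string.
query = [
    {"image": "example.jpg"},               # placeholder path
    {"text": "What is in this picture?"},
]
inputs = tokenizer.from_list_format(query)
tokenized_inputs = tokenizer(inputs, return_tensors="pt").to(model.device)

with torch.no_grad():
    pred = model.generate(**tokenized_inputs)
response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
print("Model response:", response)
```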
@@ -160,8 +158,8 @@ def predict(_chatbot, task_history) -> list:
         _chatbot[-1] = (chat_query, "No image to display.")
     else:
         _chatbot[-1] = (chat_query, response)
-    return _chatbot
-
+    return _chatbot, task_history
+
 def save_uploaded_image(image_file, upload_dir):
     if image is None:
         return None
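The functional change here is that `predict` now returns `(_chatbot, task_history)`, which fits a Gradio handler wired to two outputs (the chatbot and the state). The unchanged context lines also show `save_uploaded_image(image_file, upload_dir)` testing `image`, a name not defined in that scope. A corrected sketch of such a helper; the body below is an assumption, not part of this commit:

```python
import os
import shutil
import uuid

def save_uploaded_image(image_file, upload_dir):
    """Copy an uploaded image into upload_dir and return the saved path (sketch)."""
    if image_file is None:  # the diff's context lines check `image` here, which is undefined
        return None
    os.makedirs(upload_dir, exist_ok=True)
    # Gradio upload objects expose a temporary file path via `.name`.
    src_path = image_file.name if hasattr(image_file, "name") else str(image_file)
    dest_path = os.path.join(upload_dir, f"{uuid.uuid4().hex}{os.path.splitext(src_path)[1]}")
    shutil.copy(src_path, dest_path)
    return dest_path
```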
@@ -234,7 +232,7 @@ def _launch_demo(args, model, tokenizer):
 
     with gr.Blocks() as demo:
         gr.Markdown("""
-# 🙋🏻♂️欢迎来到🌟Tonic
+# 🙋🏻♂️欢迎来到🌟Tonic 的🦆Qwen-VL-Chat🤩Bot!🚀
 # 🙋🏻♂️Welcome toTonic's Qwen-VL-Chat Bot!
 该WebUI基于Qwen-VL-Chat,实现聊天机器人功能。 但我必须解决它的很多问题,也许我也能获得一些荣誉。
 Qwen-VL-Chat 是一种多模式输入模型。 您可以使用此空间来测试当前模型 [qwen/Qwen-VL-Chat](https://huggingface.co/qwen/Qwen-VL-Chat) 您也可以使用 🧑🏻🚀qwen/Qwen-VL -通过克隆这个空间来聊天🚀。 🧬🔬🔍 只需点击这里:[重复空间](https://huggingface.co/spaces/Tonic1/VLChat?duplicate=true)
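(In English, the Chinese copy in this Markdown block reads roughly: "This WebUI is based on Qwen-VL-Chat and provides a chatbot; I had to fix quite a few of its issues, so perhaps I deserve some credit. Qwen-VL-Chat is a multimodal-input model. You can test the current model [qwen/Qwen-VL-Chat](https://huggingface.co/qwen/Qwen-VL-Chat) in this Space, or use it yourself by duplicating the Space: [Duplicate Space](https://huggingface.co/spaces/Tonic1/VLChat?duplicate=true).")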
@@ -244,16 +242,16 @@ Join us: TeamTonic is always making cool demos! Join our active builder's comm
 """)
     with gr.Row():
         with gr.Column(scale=1):
-            chatbot = gr.Chatbot(label='Qwen-VL-Chat')
+            chatbot = gr.Chatbot(label='🦆Qwen-VL-Chat')
         with gr.Column(scale=1):
             with gr.Row():
                 query = gr.Textbox(lines=2, label='Input', placeholder="Type your message here...")
-                submit_btn = gr.Button("
+                submit_btn = gr.Button("📨Submit")
             with gr.Row():
-                file_upload = gr.UploadButton("
-                submit_file_btn = gr.Button("Submit Image")
-                regen_btn = gr.Button("
-                empty_bin = gr.Button("
+                file_upload = gr.UploadButton("⤴️Upload Image", file_types=["image"])
+                submit_file_btn = gr.Button("📩Submit Image")
+                regen_btn = gr.Button("♻️Regenerate")
+                empty_bin = gr.Button("🧼Clear History")
     task_history = gr.State([])
 
     submit_btn.click(
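The hunk ends at `submit_btn.click(`, so the event wiring itself is not visible in this diff. For context on why `predict` now returns two values, here is a self-contained sketch of typical Blocks wiring; the `add_text` helper and both handler bodies are assumptions, and only the component names and the two-value return mirror this commit:

```python
# Self-contained wiring sketch (not the app's actual handlers). add_text and
# the stub bodies are assumptions; predict's two-value return mirrors the diff.
import gradio as gr

def add_text(history, task_history, text):
    # Append the user's turn to the visible history and the task history.
    return history + [(text, None)], task_history + [(text, None)]

def predict(_chatbot, task_history):
    # Stub: the real app builds a Qwen-VL query and generates a response here.
    _chatbot[-1] = (_chatbot[-1][0], "stub response")
    return _chatbot, task_history  # mirrors `return _chatbot, task_history`

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="🦆Qwen-VL-Chat")
    query = gr.Textbox(lines=2, label="Input")
    submit_btn = gr.Button("📨Submit")
    task_history = gr.State([])

    # Each handler's return values map onto its `outputs` list, so predict's
    # (chatbot, task_history) pair feeds both the Chatbot and the State.
    submit_btn.click(
        add_text, [chatbot, task_history, query], [chatbot, task_history]
    ).then(predict, [chatbot, task_history], [chatbot, task_history])

if __name__ == "__main__":
    demo.launch()
```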