# deepseek-play / app.py
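"""Minimal Gradio Space that serves deepseek-ai/deepseek-llm-67b-chat as a text-in, text-out chatbot."""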
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr
# Load the DeepSeek chat model and tokenizer.
model_name = "deepseek-ai/deepseek-llm-67b-chat"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Load weights in bfloat16 and shard them across available devices.
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.bfloat16, device_map="auto"
)
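# Note: at bfloat16 (2 bytes per parameter) the 67B model needs roughly 134 GB of
# accelerator memory for the weights alone, so this script assumes multi-GPU or
# comparably large hardware is available.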
def chat_with_deepseek(user_input):
    # Wrap the user message in the chat format expected by the model.
    messages = [{"role": "user", "content": user_input}]
    input_tensor = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")
    outputs = model.generate(input_tensor.to(model.device), max_new_tokens=100)
    # Decode only the newly generated tokens so the prompt is not echoed back.
    result = tokenizer.decode(outputs[0][input_tensor.shape[1]:], skip_special_tokens=True)
    return result
# Build the Gradio interface.
demo = gr.Interface(
    fn=chat_with_deepseek,
    inputs="text",
    outputs="text",
    title="DeepSeek Chatbot",
    description="Chat with the DeepSeek chatbot!",
)
# Launch the app.
if __name__ == "__main__":
    demo.launch()