FlameF0X committed
Commit 3786709 · verified · 1 Parent(s): 599fd08

Update app.py

Files changed (1):
  1. app.py +1 -16
app.py CHANGED
@@ -5,9 +5,6 @@ import torch
 from transformers import StoppingCriteria, StoppingCriteriaList, TextIteratorStreamer
 import threading
 
-# -----------------------------
-# 1. Load Tiny-Purr-1B
-# -----------------------------
 model_id = "purrgpt-community/Tiny-Purr-1B"
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = AutoModelForCausalLM.from_pretrained(
@@ -17,9 +14,6 @@ model = AutoModelForCausalLM.from_pretrained(
 )
 model.eval()
 
-# -----------------------------
-# 2. Load PurrBERT safety model
-# -----------------------------
 purrbert_model = DistilBertForSequenceClassification.from_pretrained("purrgpt-community/PurrBERT-v1")
 purrbert_tokenizer = DistilBertTokenizerFast.from_pretrained("purrgpt-community/PurrBERT-v1")
 purrbert_model.eval()
@@ -32,9 +26,6 @@ SAFETY_RESPONSE = (
     "let's keep our conversations on the good side, okay? purrrr."
 )
 
-# -----------------------------
-# 3. Chat format / template
-# -----------------------------
 SYSTEM_PROMPT = (
     "<|startoftext|><|im_start|>system\n"
     "You are Tiny-Purr, a friendly, playful, cat-like AI assistant developed by PurrGPT Community. "
@@ -79,9 +70,6 @@ def clean_repetition(text, max_repeat=3):
         clean.append(line)
     return "\n".join(clean)
 
-# -----------------------------
-# 4. Streaming response function
-# -----------------------------
 def respond_stream(message, history):
     if not is_safe_prompt(message):
         yield SAFETY_RESPONSE
@@ -115,11 +103,8 @@ def respond_stream(message, history):
         buffer += token
         yield clean_repetition(buffer)
 
-# -----------------------------
-# 5. Gradio Blocks chat interface
-# -----------------------------
 with gr.Blocks() as demo:
-    gr.Markdown("## Tiny-Purr-1B Chat (with PurrBERT safety)")
+    gr.Markdown("## Tiny-Purr-1B Chat")
 
     chatbot = gr.Chatbot()
     msg = gr.Textbox(label="Your message", placeholder="Say something to Tiny-Purr...")
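The changed hunks call is_safe_prompt(message) before any generation, but that function's body sits outside this diff. A minimal sketch of what the check could look like with the PurrBERT classifier loaded in app.py; the label mapping (index 1 = unsafe) is an assumption, not something this commit shows.

import torch

def is_safe_prompt(message: str) -> bool:
    # Tokenize the incoming user message for the DistilBERT safety classifier.
    inputs = purrbert_tokenizer(
        message, return_tensors="pt", truncation=True, max_length=512
    )
    with torch.no_grad():
        logits = purrbert_model(**inputs).logits
    # Assumed label mapping: index 1 = unsafe; anything else is treated as safe.
    return int(logits.argmax(dim=-1)) != 1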
 
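The second-to-last hunk only touches the entry point of respond_stream; the generation loop between yield SAFETY_RESPONSE and buffer += token is unchanged and therefore hidden. A sketch of how the imported TextIteratorStreamer and threading typically fit together in that gap, assuming a ChatML-style turn format for history and an illustrative max_new_tokens cap (neither is spelled out in this diff):

def respond_stream(message, history):
    if not is_safe_prompt(message):
        yield SAFETY_RESPONSE
        return

    # Assumed prompt assembly: SYSTEM_PROMPT plus prior turns in the
    # <|im_start|>role ... <|im_end|> format suggested by the template above.
    prompt = SYSTEM_PROMPT
    for user_turn, bot_turn in history:
        prompt += f"<|im_start|>user\n{user_turn}<|im_end|>\n"
        prompt += f"<|im_start|>assistant\n{bot_turn}<|im_end|>\n"
    prompt += f"<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n"

    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(
        tokenizer, skip_prompt=True, skip_special_tokens=True
    )

    # Generate in a background thread so tokens can be yielded as they arrive.
    thread = threading.Thread(
        target=model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=512),
    )
    thread.start()

    buffer = ""
    for token in streamer:
        buffer += token
        yield clean_repetition(buffer)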
 
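The final hunk shows the Blocks layout (Markdown header, Chatbot, Textbox) but not how the textbox is wired to respond_stream. One plausible wiring, assuming the Chatbot keeps history as [user, assistant] pairs and streams partial replies into the last turn; the handler names below (user_submit, bot_reply) are hypothetical:

with gr.Blocks() as demo:
    gr.Markdown("## Tiny-Purr-1B Chat")

    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Your message", placeholder="Say something to Tiny-Purr...")

    def user_submit(message, chat_history):
        # Echo the user turn immediately; the assistant slot fills in while streaming.
        return "", chat_history + [[message, ""]]

    def bot_reply(chat_history):
        message = chat_history[-1][0]
        for partial in respond_stream(message, chat_history[:-1]):
            chat_history[-1][1] = partial
            yield chat_history

    msg.submit(user_submit, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot_reply, chatbot, chatbot
    )

demo.launch()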