5to9 committed on
Commit
a395bec
·
1 Parent(s): 8c0b1b0

passing args to function again

Browse files
Files changed (1) hide show
  1. app.py +5 -11
app.py CHANGED
@@ -2,7 +2,7 @@ import os
2
  import random
3
  import spaces
4
  import torch
5
- from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer, BitsAndBytesConfig
6
  import gradio as gr
7
  from threading import Thread
8
  import logging
@@ -28,7 +28,7 @@ examples = ['🛁 Schreibe einen Werbetext über ein platzsparendes Badmöbel.',
28
  '🛌 Schreibe einen Werbetext über Quantenmechanik am Beispiel eines Babybetts.',
29
  '🗄️ Schreibe einen Werbetext über eine Kommode aus veganen Fleischbällchen.',
30
  ]
31
- nexted_examples = [[example] for example in random.sample(examples, 3)]
32
 
33
 
34
  device = "cuda" # for GPU usage or "cpu" for CPU usage
@@ -51,14 +51,9 @@ def stream_chat(
51
  ):
52
  logging.debug(f'message: {message}, system_prompt: {system_prompt}, temperature: {temperature}, max_new_tokens: {max_new_tokens}')
53
 
54
- """conversation = [
55
  {"role": "system", "content": ""}
56
  ]
57
- for prompt, answer in history:
58
- conversation.extend([
59
- {"role": "user", "content": prompt},
60
- {"role": "assistant", "content": answer},
61
- ])
62
 
63
  conversation.append({"role": "user", "content": message})
64
 
@@ -85,8 +80,7 @@ def stream_chat(
85
  buffer = ""
86
  for new_text in streamer:
87
  buffer += new_text
88
- yield buffer"""
89
- return "OK"
90
 
91
 
92
  chatbot = gr.Chatbot(height=400, placeholder=PLACEHOLDER)
@@ -140,7 +134,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
140
  render=False,
141
  ),
142
  ],
143
- examples=nexted_examples,
144
  cache_examples=False,
145
  )
146
 
 
2
  import random
3
  import spaces
4
  import torch
5
+ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
6
  import gradio as gr
7
  from threading import Thread
8
  import logging
 
28
  '🛌 Schreibe einen Werbetext über Quantenmechanik am Beispiel eines Babybetts.',
29
  '🗄️ Schreibe einen Werbetext über eine Kommode aus veganen Fleischbällchen.',
30
  ]
31
+ nested_examples = [[example] for example in random.sample(examples, 3)]
32
 
33
 
34
  device = "cuda" # for GPU usage or "cpu" for CPU usage
 
51
  ):
52
  logging.debug(f'message: {message}, system_prompt: {system_prompt}, temperature: {temperature}, max_new_tokens: {max_new_tokens}')
53
 
54
+ conversation = [
55
  {"role": "system", "content": ""}
56
  ]
 
 
 
 
 
57
 
58
  conversation.append({"role": "user", "content": message})
59
 
 
80
  buffer = ""
81
  for new_text in streamer:
82
  buffer += new_text
83
+ yield buffer
 
84
 
85
 
86
  chatbot = gr.Chatbot(height=400, placeholder=PLACEHOLDER)
 
134
  render=False,
135
  ),
136
  ],
137
+ examples=nested_examples,
138
  cache_examples=False,
139
  )
140