Update app.py
app.py CHANGED

@@ -16,9 +16,13 @@ generator = pipeline(
 )
 
 # Chat history state
-def init_history():
+# Initialize empty history list
+
+def init_history():
     return []
 
+# Generate text and record history
+
 def generate_and_record(
     prompt, max_length, temperature, top_k, top_p, repetition_penalty, seed, num_return_sequences, history
 ):
@@ -38,12 +42,16 @@ def generate_and_record(
     history.append({"prompt": prompt, "results": texts})
     return "\n\n---\n\n".join(texts), history
 
-def export_history(history):
+# Export chat history to JSON file
+
+def export_history(history):
     path = "chat_history.json"
     with open(path, "w", encoding="utf-8") as f:
         json.dump(history, f, ensure_ascii=False, indent=2)
     return path
 
+# Build Gradio interface
+
 with gr.Blocks(title="GPT Text Generation") as demo:
     gr.Markdown("## Text Generation with openai-community/openai-gpt (CPU)")
 
@@ -74,4 +82,6 @@ with gr.Blocks(title="GPT Text Generation") as demo:
     clear_btn.click(lambda _: ([], ""), inputs=[history_state], outputs=[history_state, output_text])
     export_btn.click(fn=export_history, inputs=[history_state], outputs=[])
 
+# Launch app
+
 demo.queue().launch(server_name="0.0.0.0", server_port=7860)
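The diff only shows fragments of app.py, so here is a minimal sketch of how those fragments plausibly fit together into one runnable file. The helper functions, the clear/export wiring, and the launch call are taken from the diff; the pipeline arguments (task and device), the body of generate_and_record (including the set_seed call and sampling settings), the input components with their labels and default ranges, and the generate_btn name are assumptions filled in for illustration.

```python
# Minimal sketch of app.py assembled from the fragments visible in the diff above.
# Pipeline arguments, component defaults, and the generate_and_record body are assumptions.
import json

import gradio as gr
from transformers import pipeline, set_seed

# Text-generation pipeline on CPU (model name taken from the UI heading; task/device assumed)
generator = pipeline(
    "text-generation",
    model="openai-community/openai-gpt",
    device=-1,
)

# Chat history state
# Initialize empty history list
def init_history():
    return []

# Generate text and record history
def generate_and_record(
    prompt, max_length, temperature, top_k, top_p, repetition_penalty, seed, num_return_sequences, history
):
    set_seed(int(seed))  # assumed seed handling for reproducible sampling
    outputs = generator(
        prompt,
        max_length=int(max_length),
        do_sample=True,
        temperature=temperature,
        top_k=int(top_k),
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        num_return_sequences=int(num_return_sequences),
    )
    texts = [o["generated_text"] for o in outputs]
    history.append({"prompt": prompt, "results": texts})
    return "\n\n---\n\n".join(texts), history

# Export chat history to JSON file
def export_history(history):
    path = "chat_history.json"
    with open(path, "w", encoding="utf-8") as f:
        json.dump(history, f, ensure_ascii=False, indent=2)
    return path

# Build Gradio interface
with gr.Blocks(title="GPT Text Generation") as demo:
    gr.Markdown("## Text Generation with openai-community/openai-gpt (CPU)")

    prompt = gr.Textbox(label="Prompt", lines=3)
    max_length = gr.Slider(10, 200, value=80, step=1, label="Max length")
    temperature = gr.Slider(0.1, 2.0, value=0.9, step=0.05, label="Temperature")
    top_k = gr.Slider(0, 100, value=50, step=1, label="Top-k")
    top_p = gr.Slider(0.1, 1.0, value=0.95, step=0.01, label="Top-p")
    repetition_penalty = gr.Slider(1.0, 2.0, value=1.1, step=0.05, label="Repetition penalty")
    seed = gr.Number(value=42, precision=0, label="Seed")
    num_return_sequences = gr.Slider(1, 5, value=1, step=1, label="Sequences")

    output_text = gr.Textbox(label="Generated text", lines=10)
    history_state = gr.State(init_history())

    generate_btn = gr.Button("Generate")
    clear_btn = gr.Button("Clear history")
    export_btn = gr.Button("Export history")

    generate_btn.click(
        fn=generate_and_record,
        inputs=[prompt, max_length, temperature, top_k, top_p, repetition_penalty, seed, num_return_sequences, history_state],
        outputs=[output_text, history_state],
    )
    clear_btn.click(lambda _: ([], ""), inputs=[history_state], outputs=[history_state, output_text])
    export_btn.click(fn=export_history, inputs=[history_state], outputs=[])

# Launch app
demo.queue().launch(server_name="0.0.0.0", server_port=7860)
```

Note that, as in the diff, export_btn.click uses outputs=[], so the path returned by export_history is not surfaced in the UI; wiring it to a gr.File component would be a natural extension.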