Spaces: Runtime error
Commit ceca68f · Parent(s): e856cc7
Create app.py
app.py ADDED
@@ -0,0 +1,130 @@
import os
import gradio as gr
import pandas as pd
from functools import partial
from ai_classroom_suite.UIBaseComponents import *

### User Interface Chatbot Functions ###
def get_tutor_reply(chat_tutor):
    chat_tutor.get_tutor_reply()
    return gr.update(value="", interactive=True), chat_tutor.conversation_memory, chat_tutor

def get_conversation_history(chat_tutor):
    return chat_tutor.conversation_memory, chat_tutor

### Instructor Interface Helper Functions ###
def get_instructor_prompt(fileobj):
    # get the uploaded file's path and read its contents
    file_path = fileobj.name
    with open(file_path, "r") as f:
        instructor_prompt = f.read()
    return instructor_prompt

def embed_prompt(prompt, chat_tutor):
    # update the secret prompt
    os.environ["SECRET_PROMPT"] = prompt
    # update the tutor's learning objectives
    chat_tutor.learning_objectives = prompt
    return os.environ.get("SECRET_PROMPT"), chat_tutor

### User Interfaces ###
with gr.Blocks() as demo:
    # initialize the tutor (with state)
    study_tutor = gr.State(SlightlyDelusionalTutor())

    # Student interface
    with gr.Tab("For Students"):

        # Chatbot interface
        gr.Markdown("""
        ## Chat with the Model
        Description here
        """)

        with gr.Row(equal_height=True):
            with gr.Column(scale=2):
                chatbot = gr.Chatbot()
                with gr.Row():
                    user_chat_input = gr.Textbox(label="User input", scale=9)
                    user_chat_submit = gr.Button("Ask/answer model", scale=1)

        # First add the user's message to the conversation history,
        # then get a reply from the tutor and add that to the conversation history
        user_chat_submit.click(
            fn = add_user_message, inputs = [user_chat_input, study_tutor], outputs = [user_chat_input, chatbot, study_tutor], queue=False
        ).then(
            fn = get_tutor_reply, inputs = [study_tutor], outputs = [user_chat_input, chatbot, study_tutor], queue=True
        )

        # Testing the chat history storage; can be deleted at deployment
        with gr.Blocks():
            test_btn = gr.Button("View your chat history")
            chat_history = gr.JSON(label = "conversation history")
            test_btn.click(get_conversation_history, inputs=[study_tutor], outputs=[chat_history, study_tutor])

        # Download the conversation history file
        with gr.Blocks():
            gr.Markdown("""
            ## Export Your Chat History
            Export your chat history as a .json, .txt, or .csv file
            """)
            with gr.Row():
                export_dialogue_button_json = gr.Button("JSON")
                export_dialogue_button_txt = gr.Button("TXT")
                export_dialogue_button_csv = gr.Button("CSV")

            file_download = gr.Files(label="Download here", file_types=['.json', '.txt', '.csv'], type="file", visible=False)

            export_dialogue_button_json.click(save_json, study_tutor, file_download, show_progress=True)
            export_dialogue_button_txt.click(save_txt, study_tutor, file_download, show_progress=True)
            export_dialogue_button_csv.click(save_csv, study_tutor, file_download, show_progress=True)

    # Instructor interface
    with gr.Tab("Instructor Only"):
        """
        API Authentication functionality
        Instead of asking students to provide a key, the key is now provided by the instructor.
        To permanently set the key, go to Settings -> Variables and secrets -> Secrets,
        then replace the OPENAI_API_KEY value with the instructor's OpenAI key.
        """
        api_input = gr.Textbox(show_label=False, type="password", visible=False, value=os.environ.get("OPENAI_API_KEY"))

        # Upload secret prompt functionality:
        # the instructor provides a secret prompt/persona to the tutor
        with gr.Blocks():
            # for testing purposes; set visible to False at deployment
            view_secret = gr.Textbox(label="Current secret prompt", value=os.environ.get("SECRET_PROMPT"), visible=False)

            # Prompt the instructor to upload the secret file
            file_input = gr.File(label="Load a .txt or .py file", file_types=['.py', '.txt'], type="file", elem_classes="short-height")

            # Verify the prompt content
            instructor_prompt = gr.Textbox(label="Verify your prompt content", visible=True)
            file_input.upload(fn=get_instructor_prompt, inputs=file_input, outputs=instructor_prompt)

            # Placeholder components
            text_input_none = gr.Textbox(visible=False)
            file_input_none = gr.File(visible=False)
            instructor_input_none = gr.TextArea(visible=False)
            learning_objectives_none = gr.Textbox(visible=False)

            # Set the secret prompt for this session and embed it into the study tutor
            prompt_submit_btn = gr.Button("Submit")
            prompt_submit_btn.click(
                fn=embed_prompt, inputs=[instructor_prompt, study_tutor], outputs=[view_secret, study_tutor]
            ).then(
                fn=create_reference_store,
                inputs=[study_tutor, prompt_submit_btn, instructor_prompt, file_input_none, instructor_input_none, api_input, instructor_prompt],
                outputs=[study_tutor, prompt_submit_btn]
            )

    # TODO: The instructor prompt is currently only set for the session unless it is saved under Settings -> Secrets.
    # To "permanently" set a secret prompt that students using this space cannot see,
    # one possible way is to recreate the instructor interface in another space
    # and load it here to chain with the student interface.

    # TODO: Currently, the instructor prompt is handled as text input and stored in the vector store (and in the learning objectives),
    # which means the tutor is still a question-answering tutor that views the prompt as context (but does not really act on it).
    # We need to find a way to provide the prompt directly to the model and set its status.

demo.queue().launch(server_name='0.0.0.0', server_port=7860)
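The second TODO above notes that the secret prompt is only stored as retrieval context rather than steering the model directly. A minimal sketch of one possible direction, assuming the tutor's conversation_memory is a list of role/content dicts (that format and the helper below are illustrative assumptions, not part of ai_classroom_suite):

def inject_system_prompt(conversation_memory, secret_prompt):
    # Drop any existing system message, then prepend the instructor's secret
    # prompt as a system message so the model is steered by it directly.
    # NOTE: the role/content dict format is an assumption for illustration.
    memory = [m for m in conversation_memory if m.get("role") != "system"]
    return [{"role": "system", "content": secret_prompt}] + memory

# Hypothetical usage:
memory = [{"role": "user", "content": "Can you quiz me on derivatives?"}]
memory = inject_system_prompt(memory, "You are a patient calculus tutor.")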