import interpreter
import gradio as gr
import os
import io
import contextlib

# Read your OpenAI API key from the `openai_api_key` environment variable.
interpreter.api_key = os.environ.get('openai_api_key')
interpreter.auto_run = True


def chat_with_interpreter(message, history=None):
    # `history` is supplied by gr.ChatInterface but unused here;
    # Open Interpreter keeps its own conversation state.
    if message == 'reset':
        interpreter.reset()

    # Redirect stdout to capture the interpreter's streamed output.
    new_stdout = io.StringIO()
    with contextlib.redirect_stdout(new_stdout):
        interpreter.chat(message)
    output = new_stdout.getvalue()

    # Return the captured output so Gradio's ChatInterface can display it.
    return output


demo = gr.ChatInterface(
    fn=chat_with_interpreter,
    title="Open-Interpreter Gradio ChatInterface",
    description="Open Interpreter lets LLMs run code (Python, JavaScript, Shell, and more) locally",
    examples=[
        "What is 2+2?",
        "Can you solve for x: 10x - 65 = 0?",
        "What are the top 10 headlines from BBC from last week?",
    ],
    clear_btn=None,
    retry_btn=None,
    undo_btn=None,
).queue()

demo.launch(debug=True)
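# --- Usage sketch (not part of the original script; filename and key value are placeholders) ---
# Export the key under the same name the script reads, then run the file:
#   export openai_api_key="<your-openai-key>"
#   python app.py          # "app.py" is a hypothetical filename for this script
# Gradio serves the chat UI at http://127.0.0.1:7860 by default; pass share=True to
# demo.launch() if you want a temporary public link.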