Keeby-smilyai committed on
Commit
eb1a5a2
·
verified ·
1 Parent(s): 9c6c9ea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -8
app.py CHANGED
@@ -478,22 +478,50 @@ with gr.Blocks(theme=gr.themes.Soft(), title="Chat with Sam") as demo:
478
  """)
479
 
480
  # Event handlers
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
481
  msg.submit(
482
- chat_interface,
483
- inputs=[msg, chatbot, temperature, top_p, top_k, max_tokens],
484
- outputs=msg,
485
  ).then(
486
- lambda: gr.update(value=""),
487
  None,
488
  msg
489
  )
490
 
491
  submit.click(
492
- chat_interface,
493
- inputs=[msg, chatbot, temperature, top_p, top_k, max_tokens],
494
- outputs=msg,
495
  ).then(
496
- lambda: gr.update(value=""),
497
  None,
498
  msg
499
  )
 
478
  """)
479
 
480
  # Event handlers
481
+ def respond(message, chat_history, temperature, top_p, top_k, max_tokens):
482
+ """Handle message and generate response"""
483
+ # Add user message to history
484
+ chat_history.append([message, None])
485
+
486
+ # Build conversation context from history (last 3 turns to save tokens)
487
+ conversation = ""
488
+ recent_history = chat_history[:-1][-3:] if len(chat_history) > 1 else []
489
+
490
+ for user_msg, bot_msg in recent_history:
491
+ if bot_msg: # Only include completed turns
492
+ conversation += f"User: {user_msg}\nSam: {bot_msg}\n"
493
+
494
+ # Add current message
495
+ full_prompt = conversation + message if conversation else message
496
+
497
+ # Generate with streaming
498
+ chat_history[-1][1] = ""
499
+ for response_chunk in generate_stream(
500
+ full_prompt,
501
+ max_new_tokens=max_tokens,
502
+ temperature=temperature,
503
+ top_p=top_p,
504
+ top_k=top_k
505
+ ):
506
+ chat_history[-1][1] = response_chunk
507
+ yield chat_history
508
+
509
  msg.submit(
510
+ respond,
511
+ [msg, chatbot, temperature, top_p, top_k, max_tokens],
512
+ chatbot
513
  ).then(
514
+ lambda: gr.Textbox(value=""),
515
  None,
516
  msg
517
  )
518
 
519
  submit.click(
520
+ respond,
521
+ [msg, chatbot, temperature, top_p, top_k, max_tokens],
522
+ chatbot
523
  ).then(
524
+ lambda: gr.Textbox(value=""),
525
  None,
526
  msg
527
  )