danf committed on
Commit
09e5658
·
verified ·
1 Parent(s): 4b04b6d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +165 -107
app.py CHANGED
@@ -4,7 +4,7 @@ import logging
4
  import os
5
  from functools import lru_cache
6
  from threading import Thread
7
- from typing import Generator, List, Tuple
8
 
9
  import gradio as gr
10
  import regex
@@ -41,7 +41,7 @@ def load_model_and_tokenizer():
41
  tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
42
  model = AutoModelForCausalLM.from_pretrained(
43
  MODEL_NAME,
44
- torch_dtype=torch.float16,
45
  device_map="auto",
46
  )
47
  logger.info("Model loaded successfully")
@@ -54,24 +54,21 @@ DEFAULT_LANG = "he"
54
 
55
  labels = {
56
  "he": {
57
- "title": "ืžืชืžื˜ื™ื‘ื•ื˜ ๐Ÿงฎ",
58
  "intro": (
59
- """
60
- ื‘ืจื•ื›ื™ื ื”ื‘ืื™ื ืœื“ืžื•! ๐Ÿ’ก ื›ืืŸ ืชื•ื›ืœื• ืœื”ืชืจืฉื **ืžืžื•ื“ืœ ื”ืฉืคื” ื”ื—ื“ืฉ** ืฉืœื ื•; ืžื•ื“ืœ ื‘ื’ื•ื“ืœ 4 ืžื™ืœื™ืืจื“ ืคืจืžื˜ืจื™ื ืฉืื•ืžืŸ ืœืขื ื•ืช ืขืœ ืฉืืœื•ืช ืžืชืžื˜ื™ื•ืช ื‘ืขื‘ืจื™ืช, ืขืœ ื”ืžื—ืฉื‘ ืฉืœื›ื, ืœืœื ื—ื™ื‘ื•ืจ ืœืจืฉืช.
61
-
62
- ืงื™ืฉื•ืจ ืœืžื•ื“ืœ, ืคืจื˜ื™ื ื ื•ืกืคื™ื, ื™ืฆื™ืจืช ืงืฉืจ ื•ืชื ืื™ ืฉื™ืžื•ืฉ:
63
-
64
- https://huggingface.co/Intel/hebrew-math-tutor-v1
65
-
66
- -----
67
- """
68
  ),
69
  "select_label": "ื‘ื—ืจื• ืฉืืœื” ืžื•ื›ื ื” ืื• ืฆืจื• ืฉืืœื” ื—ื“ืฉื”:",
70
  "new_question": "ืฉืืœื” ื—ื“ืฉื”...",
71
  "text_label": "ืฉืืœื”:",
72
  "placeholder": "ื”ื–ื™ื ื• ืืช ื”ืฉืืœื” ื›ืืŸ...",
73
  "send": "ืฉืœื—",
74
- "reset": "ืฉื™ื—ื” ื—ื“ืฉื”",
75
  "toggle_to": "English ๐Ÿ‡ฌ๐Ÿ‡ง",
76
  "predefined": [
77
  "ืฉืืœื” ื—ื“ืฉื”...",
@@ -83,10 +80,10 @@ https://huggingface.co/Intel/hebrew-math-tutor-v1
83
  "thinking_prefix": "๐Ÿค” ื—ื•ืฉื‘",
84
  "thinking_done": "๐Ÿค” *ืชื”ืœื™ืš ื”ื—ืฉื™ื‘ื” ื”ื•ืฉืœื, ืžื›ื™ืŸ ืชืฉื•ื‘ื”...*",
85
  "final_label": "๐Ÿ“ ืชืฉื•ื‘ื” ืกื•ืคื™ืช:",
86
- "chat_label": "ืฆ'ืื˜",
87
  },
88
  "en": {
89
- "title": "MathBot ๐Ÿงฎ",
90
  "intro": (
91
  """
92
  Welcome to the demo! ๐Ÿ’ก Here you can try our **new language model** โ€” a 4-billion-parameter model trained to answer math questions in Hebrew while maintaining its English capabilities. It runs locally on your machine without requiring an internet connection.
@@ -103,7 +100,7 @@ https://huggingface.co/Intel/hebrew-math-tutor-v1
103
  "text_label": "Question:",
104
  "placeholder": "Type your question here...",
105
  "send": "Send",
106
- "reset": "New Conversation",
107
  "toggle_to": "ืขื‘ืจื™ืช ๐Ÿ‡ฎ๐Ÿ‡ฑ",
108
  "predefined": [
109
  "New question...",
@@ -115,12 +112,12 @@ https://huggingface.co/Intel/hebrew-math-tutor-v1
115
  "thinking_prefix": "๐Ÿค” Thinking",
116
  "thinking_done": "๐Ÿค” *Thinking complete, preparing answer...*",
117
  "final_label": "๐Ÿ“ Final answer:",
118
- "chat_label": "Chat",
119
  },
120
  }
121
 
122
 
123
def dir_and_alignment(lang: str) -> tuple[str, str]:
    """Return the (text direction, alignment) pair for a UI language code.

    Hebrew ("he") renders right-to-left / right-aligned; every other
    language code falls back to left-to-right / left-aligned.

    Args:
        lang: Two-letter language code ("he" or "en" in this app).

    Returns:
        A ("rtl", "right") or ("ltr", "left") tuple.
    """
    # Builtin generic `tuple` (PEP 585) replaces deprecated typing.Tuple;
    # the file already relies on 3.10+ syntax elsewhere (`str | None`).
    if lang == "he":
        return "rtl", "right"
    return "ltr", "left"
@@ -175,7 +172,7 @@ def build_assistant_markdown(
175
  direction, align = dir_and_alignment(lang)
176
  localized = labels[lang]
177
 
178
- parts: List[str] = []
179
  if thinking_text:
180
  details = _details_template.format(
181
  dir=direction,
@@ -193,57 +190,52 @@ def build_assistant_markdown(
193
  return "\n\n".join(parts)
194
 
195
 
196
- @spaces.GPU
197
  def handle_user_message(
198
  user_input: str,
199
  lang: str,
200
- chat_history: List[Tuple[str, str]] | None,
201
  ) -> Generator[tuple, None, None]:
202
  lang = lang or DEFAULT_LANG
203
  localized = labels[lang]
204
- chat_history = chat_history or []
205
  prompt = (user_input or "").strip()
206
  if not prompt:
207
  yield (
208
- chat_history,
209
  "",
210
  localized["new_question"],
211
- chat_history,
212
  )
213
  return
214
 
215
- formatted_user = wrap_text_with_direction(prompt, lang)
216
- chat_history = chat_history + [(formatted_user, "")]
217
  dropdown_reset = localized["new_question"]
218
- yield chat_history, "", dropdown_reset, chat_history
219
 
220
  system_prompt = build_system_prompt(lang)
221
 
222
- # Format as chat template
223
  chat_messages = [
224
  {"role": "system", "content": system_prompt},
225
  {"role": "user", "content": prompt},
226
  ]
227
 
228
- # Apply chat template
229
  input_text = tokenizer.apply_chat_template(
230
  chat_messages,
231
- tokenize=False,
232
  add_generation_prompt=True,
 
233
  )
234
 
235
- inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
236
 
237
- thinking_buffer = ""
238
  thinking_text: str | None = None
239
  final_answer = ""
240
  response_fallback = ""
241
- in_thinking = False
242
 
243
  try:
244
  streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
245
  generation_kwargs = dict(
246
- **inputs,
247
  streamer=streamer,
248
  max_new_tokens=2400,
249
  temperature=0.6,
@@ -261,8 +253,8 @@ def handle_user_message(
261
 
262
  response_fallback += delta
263
 
264
- if "<think>" in delta:
265
- in_thinking = True
266
 
267
  if in_thinking:
268
  thinking_buffer += delta
@@ -280,34 +272,30 @@ def handle_user_message(
280
  thinking_text=thinking_text,
281
  )
282
 
283
- chat_history[-1] = (formatted_user, current_answer)
284
- yield chat_history, "", dropdown_reset, chat_history
285
 
286
  thread.join()
287
 
288
  except Exception as exc:
289
  error_html = wrap_text_with_direction(f"โš ๏ธ Error generating response: {exc}", lang)
290
- chat_history[-1] = (formatted_user, error_html)
291
- yield chat_history, prompt, dropdown_reset, chat_history
292
  return
293
 
294
  if not final_answer:
295
  final_answer = response_fallback
296
 
297
- chat_history[-1] = (
298
- formatted_user,
299
- build_assistant_markdown(lang=lang, final_answer=final_answer, thinking_text=thinking_text),
300
  )
301
- yield chat_history, "", dropdown_reset, chat_history
302
 
303
 
304
def reset_conversation(lang: str):
    """Start a fresh conversation for the given language.

    Returns reset values for, in order: the chatbot display, the question
    textbox, the preset dropdown (back to its placeholder entry), and the
    chat-history state.
    """
    placeholder = labels[lang]["new_question"]
    return [], "", placeholder, []
312
 
313
 
@@ -322,62 +310,140 @@ def toggle_language_and_rebuild(lang: str):
322
  """Toggle language and return new values for all UI components"""
323
  new_lang = "en" if lang == "he" else "he"
324
  localized = labels[new_lang]
 
 
 
325
  return (
326
- new_lang, # lang_state
327
- f"# {localized['title']}", # title_md
328
- localized["intro"], # intro_md
329
- gr.Dropdown(
330
- label=localized["select_label"],
331
  choices=localized["predefined"],
332
  value=localized["new_question"],
333
- interactive=True,
334
- ), # preset_dropdown
335
- gr.Textbox(
336
- label=localized["text_label"],
337
- placeholder=localized["placeholder"],
338
- lines=5,
339
- value="",
340
- ), # question_box
341
- localized["send"], # send_button value
342
- localized["reset"], # reset_button value
343
- localized["toggle_to"], # lang_button value
344
- localized["chat_label"], # chatbot label
345
  )
346
 
347
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
348
  def build_demo() -> gr.Blocks:
349
  localized = labels[DEFAULT_LANG]
350
  with gr.Blocks(title="Hebrew Math Tutor") as demo:
 
 
 
351
  lang_state = gr.State(DEFAULT_LANG)
352
- chat_state = gr.State([])
353
 
354
- title_md = gr.Markdown(f"# {localized['title']}")
355
  intro_md = gr.Markdown(localized["intro"])
356
 
357
- with gr.Row():
358
- preset_dropdown = gr.Dropdown(
359
- label=localized["select_label"],
360
- choices=localized["predefined"],
361
- value=localized["new_question"],
362
- interactive=True,
363
  )
364
- lang_button = gr.Button(localized["toggle_to"], variant="secondary")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
365
 
366
  question_box = gr.Textbox(
367
- label=localized["text_label"],
368
  placeholder=localized["placeholder"],
369
  lines=5,
 
 
370
  )
371
 
372
  with gr.Row():
373
  reset_button = gr.Button(localized["reset"], variant="secondary")
374
  send_button = gr.Button(localized["send"], variant="primary")
 
375
 
376
- chatbot = gr.Chatbot(
377
- label=localized["chat_label"],
378
- height=520,
379
- render_markdown=True,
380
  )
 
 
 
 
 
 
 
 
 
381
 
382
  preset_dropdown.change(
383
  fn=sync_question_text,
@@ -386,30 +452,40 @@ def build_demo() -> gr.Blocks:
386
  )
387
 
388
  reset_button.click(
389
- fn=reset_conversation,
390
  inputs=[lang_state],
391
- outputs=[chatbot, question_box, preset_dropdown, chat_state],
392
  )
393
 
394
  send_button.click(
395
  fn=handle_user_message,
396
- inputs=[question_box, lang_state, chat_state],
397
- outputs=[chatbot, question_box, preset_dropdown, chat_state],
398
  )
399
 
400
  question_box.submit(
401
  fn=handle_user_message,
402
- inputs=[question_box, lang_state, chat_state],
403
- outputs=[chatbot, question_box, preset_dropdown, chat_state],
404
  )
405
 
406
- # Simplified language toggle - just updates the state
407
- # In a production app, you'd want to rebuild the entire UI
408
- # For now, we'll just update the state (labels won't change dynamically)
409
  lang_button.click(
410
- fn=lambda lang: "en" if lang == "he" else "he",
411
  inputs=[lang_state],
412
- outputs=[lang_state],
 
 
 
 
 
 
 
 
 
 
 
 
413
  )
414
 
415
  return demo
@@ -419,21 +495,3 @@ demo = build_demo()
419
 
420
  if __name__ == "__main__":
421
  demo.queue().launch()
422
-
423
-
424
- demo = build_demo()
425
-
426
- CUSTOM_CSS = """
427
- body {
428
- font-family: 'Rubik', 'Segoe UI', 'Helvetica Neue', Arial, sans-serif;
429
- }
430
- details > summary {
431
- cursor: pointer;
432
- }
433
- .gradio-container .prose p {
434
- margin-bottom: 0.5rem;
435
- }
436
- """
437
-
438
- if __name__ == "__main__":
439
- demo.queue().launch(css=CUSTOM_CSS)
 
4
  import os
5
  from functools import lru_cache
6
  from threading import Thread
7
+ from typing import Generator
8
 
9
  import gradio as gr
10
  import regex
 
41
  tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
42
  model = AutoModelForCausalLM.from_pretrained(
43
  MODEL_NAME,
44
+ dtype=torch.bfloat16,
45
  device_map="auto",
46
  )
47
  logger.info("Model loaded successfully")
 
54
 
55
  labels = {
56
  "he": {
57
+ "title": '<h1 dir="rtl" style="text-align: right;">ืžืชืžื˜ื™ื‘ื•ื˜ ๐Ÿงฎ</h1>',
58
  "intro": (
59
+ '<div dir="rtl" style="text-align: right;">\n\n'
60
+ "ื‘ืจื•ื›ื™ื ื”ื‘ืื™ื ืœื“ืžื•! ๐Ÿ’ก ื›ืืŸ ืชื•ื›ืœื• ืœื”ืชืจืฉื **ืžืžื•ื“ืœ ื”ืฉืคื” ื”ื—ื“ืฉ** ืฉืœื ื•; ืžื•ื“ืœ ื‘ื’ื•ื“ืœ 4 ืžื™ืœื™ืืจื“ ืคืจืžื˜ืจื™ื ืฉืื•ืžืŸ ืœืขื ื•ืช ืขืœ ืฉืืœื•ืช ืžืชืžื˜ื™ื•ืช ื‘ืขื‘ืจื™ืช, ืขืœ ื”ืžื—ืฉื‘ ืฉืœื›ื, ืœืœื ื—ื™ื‘ื•ืจ ืœืจืฉืช.\n\n"
61
+ "ืงื™ืฉื•ืจ ืœืžื•ื“ืœ, ืคืจื˜ื™ื ื ื•ืกืคื™ื, ื™ืฆื™ืจืช ืงืฉืจ ื•ืชื ืื™ ืฉื™ืžื•ืฉ:\n\n"
62
+ "https://huggingface.co/Intel/hebrew-math-tutor-v1\n\n"
63
+ "-----\n\n"
64
+ "</div>"
 
 
 
65
  ),
66
  "select_label": "ื‘ื—ืจื• ืฉืืœื” ืžื•ื›ื ื” ืื• ืฆืจื• ืฉืืœื” ื—ื“ืฉื”:",
67
  "new_question": "ืฉืืœื” ื—ื“ืฉื”...",
68
  "text_label": "ืฉืืœื”:",
69
  "placeholder": "ื”ื–ื™ื ื• ืืช ื”ืฉืืœื” ื›ืืŸ...",
70
  "send": "ืฉืœื—",
71
+ "reset": "ื ืงื”",
72
  "toggle_to": "English ๐Ÿ‡ฌ๐Ÿ‡ง",
73
  "predefined": [
74
  "ืฉืืœื” ื—ื“ืฉื”...",
 
80
  "thinking_prefix": "๐Ÿค” ื—ื•ืฉื‘",
81
  "thinking_done": "๐Ÿค” *ืชื”ืœื™ืš ื”ื—ืฉื™ื‘ื” ื”ื•ืฉืœื, ืžื›ื™ืŸ ืชืฉื•ื‘ื”...*",
82
  "final_label": "๐Ÿ“ ืชืฉื•ื‘ื” ืกื•ืคื™ืช:",
83
+ "answer_label": "ืชืฉื•ื‘ื”:",
84
  },
85
  "en": {
86
+ "title": "<h1>MathBot ๐Ÿงฎ</h1>",
87
  "intro": (
88
  """
89
  Welcome to the demo! ๐Ÿ’ก Here you can try our **new language model** โ€” a 4-billion-parameter model trained to answer math questions in Hebrew while maintaining its English capabilities. It runs locally on your machine without requiring an internet connection.
 
100
  "text_label": "Question:",
101
  "placeholder": "Type your question here...",
102
  "send": "Send",
103
+ "reset": "Clear",
104
  "toggle_to": "ืขื‘ืจื™ืช ๐Ÿ‡ฎ๐Ÿ‡ฑ",
105
  "predefined": [
106
  "New question...",
 
112
  "thinking_prefix": "๐Ÿค” Thinking",
113
  "thinking_done": "๐Ÿค” *Thinking complete, preparing answer...*",
114
  "final_label": "๐Ÿ“ Final answer:",
115
+ "answer_label": "Answer:",
116
  },
117
  }
118
 
119
 
120
def dir_and_alignment(lang: str) -> tuple[str, str]:
    """Map a language code to its (direction, text-align) pair.

    "he" is right-to-left; any other code defaults to left-to-right.
    """
    return ("rtl", "right") if lang == "he" else ("ltr", "left")
 
172
  direction, align = dir_and_alignment(lang)
173
  localized = labels[lang]
174
 
175
+ parts = []
176
  if thinking_text:
177
  details = _details_template.format(
178
  dir=direction,
 
190
  return "\n\n".join(parts)
191
 
192
 
193
+ # @spaces.GPU
194
  def handle_user_message(
195
  user_input: str,
196
  lang: str,
 
197
  ) -> Generator[tuple, None, None]:
198
  lang = lang or DEFAULT_LANG
199
  localized = labels[lang]
 
200
  prompt = (user_input or "").strip()
201
  if not prompt:
202
  yield (
203
+ "",
204
  "",
205
  localized["new_question"],
 
206
  )
207
  return
208
 
 
 
209
  dropdown_reset = localized["new_question"]
210
+ yield "", "", dropdown_reset
211
 
212
  system_prompt = build_system_prompt(lang)
213
 
214
+ # Apply chat template with just user message
215
  chat_messages = [
216
  {"role": "system", "content": system_prompt},
217
  {"role": "user", "content": prompt},
218
  ]
219
 
 
220
  input_text = tokenizer.apply_chat_template(
221
  chat_messages,
222
+ tokenize=True,
223
  add_generation_prompt=True,
224
+ enable_thinking=True,
225
  )
226
 
227
+ inputs = torch.tensor([input_text]).to(model.device)
228
 
229
+ thinking_buffer = "<think>"
230
  thinking_text: str | None = None
231
  final_answer = ""
232
  response_fallback = ""
233
+ in_thinking = True
234
 
235
  try:
236
  streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
237
  generation_kwargs = dict(
238
+ inputs=inputs,
239
  streamer=streamer,
240
  max_new_tokens=2400,
241
  temperature=0.6,
 
253
 
254
  response_fallback += delta
255
 
256
+ # if "<think>" in delta:
257
+ # in_thinking = True
258
 
259
  if in_thinking:
260
  thinking_buffer += delta
 
272
  thinking_text=thinking_text,
273
  )
274
 
275
+ yield current_answer, "", dropdown_reset
 
276
 
277
  thread.join()
278
 
279
  except Exception as exc:
280
  error_html = wrap_text_with_direction(f"โš ๏ธ Error generating response: {exc}", lang)
281
+ yield error_html, prompt, dropdown_reset
 
282
  return
283
 
284
  if not final_answer:
285
  final_answer = response_fallback
286
 
287
+ final_markdown = build_assistant_markdown(
288
+ lang=lang, final_answer=final_answer, thinking_text=thinking_text
 
289
  )
290
+ yield final_markdown, "", dropdown_reset
291
 
292
 
293
def clear_answer(lang: str):
    """Reset the answer area for the given language.

    Returns cleared values for, in order: the answer markdown, the
    question textbox, and the preset dropdown (restored to its
    language-appropriate placeholder entry).
    """
    defaults = labels[lang]
    return "", "", defaults["new_question"]
300
 
301
 
 
310
  """Toggle language and return new values for all UI components"""
311
  new_lang = "en" if lang == "he" else "he"
312
  localized = labels[new_lang]
313
+ # direction, align = dir_and_alignment(new_lang)
314
+ dropdown_class = "rtl" if new_lang == "he" else "ltr"
315
+ textbox_class = "rtl" if new_lang == "he" else "ltr"
316
  return (
317
+ new_lang,
318
+ localized["title"],
319
+ localized["intro"],
320
+ wrap_text_with_direction(localized["select_label"], new_lang, emphasized=True),
321
+ gr.update(
322
  choices=localized["predefined"],
323
  value=localized["new_question"],
324
+ elem_classes=[dropdown_class],
325
+ ),
326
+ wrap_text_with_direction(localized["text_label"], new_lang, emphasized=True),
327
+ gr.update(
328
+ placeholder=localized["placeholder"], value="", elem_classes=[textbox_class]
329
+ ),
330
+ localized["send"],
331
+ localized["reset"],
332
+ wrap_text_with_direction(localized["answer_label"], new_lang, emphasized=True),
333
+ localized["toggle_to"],
 
 
334
  )
335
 
336
 
337
+ CUSTOM_CSS = """
338
+ body {
339
+ font-family: 'Rubik', 'Segoe UI', 'Helvetica Neue', Arial, sans-serif;
340
+ }
341
+ details > summary {
342
+ cursor: pointer;
343
+ }
344
+ .gradio-container .prose p {
345
+ margin-bottom: 0.5rem;
346
+ }
347
+ [dir="rtl"] {
348
+ direction: rtl;
349
+ text-align: right;
350
+ }
351
+ [dir="ltr"] {
352
+ direction: ltr;
353
+ text-align: left;
354
+ }
355
+ /* Alignment helpers for inputs and dropdowns using elem_classes */
356
+ .rtl textarea, .rtl .gr-dropdown {
357
+ direction: rtl !important;
358
+ text-align: right !important;
359
+ }
360
+ .ltr textarea, .ltr .gr-dropdown {
361
+ direction: ltr !important;
362
+ text-align: left !important;
363
+ }
364
+ .rtl input, .rtl select, .rtl .gr-dropdown, .rtl .gradio-dropdown {
365
+ direction: rtl !important;
366
+ text-align: right !important;
367
+ }
368
+ .ltr input, .ltr select, .ltr .gr-dropdown, .ltr .gradio-dropdown {
369
+ direction: ltr !important;
370
+ text-align: left !important;
371
+ }
372
+
373
+ /* Visual container for the answer area */
374
+ .answer-box {
375
+ background: #ffffff !important;
376
+ border: 1px solid rgba(0,0,0,0.08);
377
+ border-radius: 10px;
378
+ padding: 12px 16px;
379
+ box-shadow: none;
380
+ margin-top: 8px;
381
+ }
382
+ .answer-box .prose {
383
+ margin: 0;
384
+ }
385
+ """
386
+
387
+
388
  def build_demo() -> gr.Blocks:
389
  localized = labels[DEFAULT_LANG]
390
  with gr.Blocks(title="Hebrew Math Tutor") as demo:
391
+ # Inject custom CSS
392
+ gr.HTML(f"<style>{CUSTOM_CSS}</style>")
393
+
394
  lang_state = gr.State(DEFAULT_LANG)
 
395
 
396
+ title_md = gr.Markdown(localized["title"])
397
  intro_md = gr.Markdown(localized["intro"])
398
 
399
+ select_label_md = gr.Markdown(
400
+ wrap_text_with_direction(
401
+ localized["select_label"], DEFAULT_LANG, emphasized=True
 
 
 
402
  )
403
+ )
404
+
405
+ preset_dropdown = gr.Dropdown(
406
+ label="",
407
+ choices=localized["predefined"],
408
+ value=localized["new_question"],
409
+ interactive=True,
410
+ show_label=False,
411
+ elem_classes=("rtl" if DEFAULT_LANG == "he" else "ltr"),
412
+ )
413
+
414
+ question_label_md = gr.Markdown(
415
+ wrap_text_with_direction(
416
+ localized["text_label"], DEFAULT_LANG, emphasized=True
417
+ )
418
+ )
419
 
420
  question_box = gr.Textbox(
421
+ label="",
422
  placeholder=localized["placeholder"],
423
  lines=5,
424
+ show_label=False,
425
+ elem_classes=("rtl" if DEFAULT_LANG == "he" else "ltr"),
426
  )
427
 
428
  with gr.Row():
429
  reset_button = gr.Button(localized["reset"], variant="secondary")
430
  send_button = gr.Button(localized["send"], variant="primary")
431
+ lang_button = gr.Button(localized["toggle_to"], variant="secondary")
432
 
433
+ answer_label_md = gr.Markdown(
434
+ wrap_text_with_direction(
435
+ localized["answer_label"], DEFAULT_LANG, emphasized=True
436
+ )
437
  )
438
+ with gr.Group(elem_classes="answer-box"):
439
+ answer_box = gr.Markdown(
440
+ label="",
441
+ show_label=False,
442
+ latex_delimiters=[
443
+ {"left": "$$", "right": "$$", "display": True},
444
+ {"left": "$", "right": "$", "display": False},
445
+ ],
446
+ )
447
 
448
  preset_dropdown.change(
449
  fn=sync_question_text,
 
452
  )
453
 
454
  reset_button.click(
455
+ fn=clear_answer,
456
  inputs=[lang_state],
457
+ outputs=[answer_box, question_box, preset_dropdown],
458
  )
459
 
460
  send_button.click(
461
  fn=handle_user_message,
462
+ inputs=[question_box, lang_state],
463
+ outputs=[answer_box, question_box, preset_dropdown],
464
  )
465
 
466
  question_box.submit(
467
  fn=handle_user_message,
468
+ inputs=[question_box, lang_state],
469
+ outputs=[answer_box, question_box, preset_dropdown],
470
  )
471
 
472
+ # Language toggle - updates state and UI labels
 
 
473
  lang_button.click(
474
+ fn=toggle_language_and_rebuild,
475
  inputs=[lang_state],
476
+ outputs=[
477
+ lang_state,
478
+ title_md,
479
+ intro_md,
480
+ select_label_md,
481
+ preset_dropdown,
482
+ question_label_md,
483
+ question_box,
484
+ send_button,
485
+ reset_button,
486
+ answer_label_md,
487
+ lang_button,
488
+ ],
489
  )
490
 
491
  return demo
 
495
 
496
  if __name__ == "__main__":
497
  demo.queue().launch()