szili2011 committed on
Commit
69efabe
·
verified ·
1 Parent(s): 0a68069

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +61 -38
app.py CHANGED
@@ -1,4 +1,4 @@
1
- # app.py (Hardened and Debuggable Version)
2
  import gradio as gr
3
  import tensorflow as tf
4
  import pickle
@@ -22,24 +22,27 @@ except Exception as e:
22
  print(f"FATAL ERROR loading files: {e}")
23
  successor_model, predecessor_model = None, None
24
 
25
- # --- 2. THE CORE PREDICTION LOGIC (MODIFIED) ---
26
- # This function now receives the actual model and tokenizer objects
27
- def predict_next_state(model, tokenizers, current_unit, current_analogy, current_commentary):
28
- if not model or not tokenizers:
29
- return "Error: A required model or tokenizer is not loaded.", "Check server logs.", "---"
30
 
31
- # Prepare input data
32
- input_data = {'current_unit_name': [current_unit], 'current_analogy': [current_analogy], 'current_commentary': [current_commentary]}
 
 
 
 
33
  processed_input = {}
34
  for col, text_list in input_data.items():
35
  sequences = tokenizers[col].texts_to_sequences(text_list)
36
  padded_sequences = tf.keras.preprocessing.sequence.pad_sequences(sequences, maxlen=MAX_SEQ_LENGTH, padding='post')
37
  processed_input[col] = padded_sequences
38
 
39
- # Get AI prediction
40
  predictions = model.predict(processed_input)
41
 
42
- # Decode prediction back to text
43
  target_texts = {}
44
  output_cols = ['target_unit_name', 'target_analogy', 'target_commentary']
45
  for i, col in enumerate(output_cols):
@@ -48,52 +51,72 @@ def predict_next_state(model, tokenizers, current_unit, current_analogy, current
48
  clean_text = ' '.join([word for word in predicted_sequence.split() if word not in ['<oov>', 'end']])
49
  target_texts[col] = clean_text.strip()
50
 
51
- # *** DEBUGGING PRINT ***
52
- print(f"--- PREDICTION DECODED ---")
53
  print(f"Decoded Unit Name: {target_texts['target_unit_name']}")
54
- print(f"Decoded Analogy: {target_texts['target_analogy']}")
55
- print("--------------------------")
56
-
 
 
 
 
 
57
  # Handle "Infinity" Sentinel
58
- if "end of knowledge" in target_texts['target_unit_name'].lower():
59
  direction = "larger" if model == successor_model else "smaller"
60
  prefix = "Giga-" if direction == "larger" else "pico-"
61
- new_unit = f"{prefix}{current_unit}"
62
- new_analogy = "A procedurally generated unit beyond the AI's known universe."
63
- new_commentary = "This represents a step into true infinity, where rules replace learned knowledge."
64
- return new_unit, new_analogy, new_commentary
65
- else:
66
- return target_texts['target_unit_name'], target_texts['target_analogy'], target_texts['target_commentary']
67
 
68
- # --- WRAPPER FUNCTIONS (MODIFIED) ---
69
- # These wrappers now pass the correct objects explicitly
70
- def go_larger(unit, analogy, commentary):
71
  print("\n>>> 'Go Larger' button clicked. Using SUCCESSOR model.")
72
- return predict_next_state(successor_model, successor_tokenizers, unit, analogy, commentary)
73
 
74
- def go_smaller(unit, analogy, commentary):
75
  print("\n>>> 'Go Smaller' button clicked. Using PREDECESSOR model.")
76
- return predict_next_state(predecessor_model, predecessor_tokenizers, unit, analogy, commentary)
77
 
78
- # --- 3. THE GRADIO USER INTERFACE (No changes needed here) ---
79
- initial_unit = "Byte"
80
- initial_analogy = "a single character of text, like 'R'"
81
- initial_commentary = "From binary choices, a building block is formed, ready to hold a single, recognizable symbol."
 
 
82
 
83
  with gr.Blocks(theme=gr.themes.Soft(primary_hue="sky")) as demo:
84
  gr.Markdown("# 🤖 Digital Scale Explorer AI")
85
- # ... (the rest of the UI code is identical) ...
86
  gr.Markdown("An AI trained from scratch to explore the infinite ladder of data sizes. Click the buttons to traverse the universe of data!")
 
 
 
 
 
87
  with gr.Row():
88
- unit_name_out = gr.Textbox(value=initial_unit, label="Unit Name", interactive=False)
89
- analogy_out = gr.Textbox(value=initial_analogy, label="Analogy", lines=4, interactive=False)
90
- commentary_out = gr.Textbox(value=initial_commentary, label="AI Commentary", lines=3, interactive=False)
 
91
  with gr.Row():
92
  smaller_btn = gr.Button("Go Smaller ⬇️", variant="secondary", size="lg")
93
  larger_btn = gr.Button("Go Larger ⬆️", variant="primary", size="lg")
94
 
95
- larger_btn.click(fn=go_larger, inputs=[unit_name_out, analogy_out, commentary_out], outputs=[unit_name_out, analogy_out, commentary_out])
96
- smaller_btn.click(fn=go_smaller, inputs=[unit_name_out, analogy_out, commentary_out], outputs=[unit_name_out, analogy_out, commentary_out])
 
 
 
 
 
 
 
 
 
 
97
 
98
  if __name__ == "__main__":
99
  demo.launch()
 
1
+ # app.py (Final Version with gr.State for Robust State Management)
2
  import gradio as gr
3
  import tensorflow as tf
4
  import pickle
 
22
  print(f"FATAL ERROR loading files: {e}")
23
  successor_model, predecessor_model = None, None
24
 
25
+ # --- 2. THE CORE PREDICTION LOGIC ---
26
+ # This function is the same, but it will now receive its input from the reliable gr.State
27
+ def predict_next_state(model, tokenizers, current_state_dict):
28
+ if not model or not tokenizers or not current_state_dict:
29
+ return {"error": "Model or state is not loaded"}, "Error", "Error", "Error"
30
 
31
+ # Prepare input data from the state dictionary
32
+ input_data = {
33
+ 'current_unit_name': [current_state_dict['unit_name']],
34
+ 'current_analogy': [current_state_dict['analogy']],
35
+ 'current_commentary': [current_state_dict['commentary']]
36
+ }
37
  processed_input = {}
38
  for col, text_list in input_data.items():
39
  sequences = tokenizers[col].texts_to_sequences(text_list)
40
  padded_sequences = tf.keras.preprocessing.sequence.pad_sequences(sequences, maxlen=MAX_SEQ_LENGTH, padding='post')
41
  processed_input[col] = padded_sequences
42
 
 
43
  predictions = model.predict(processed_input)
44
 
45
+ # Decode prediction
46
  target_texts = {}
47
  output_cols = ['target_unit_name', 'target_analogy', 'target_commentary']
48
  for i, col in enumerate(output_cols):
 
51
  clean_text = ' '.join([word for word in predicted_sequence.split() if word not in ['<oov>', 'end']])
52
  target_texts[col] = clean_text.strip()
53
 
 
 
54
  print(f"Decoded Unit Name: {target_texts['target_unit_name']}")
55
+
56
+ # Create the new state dictionary
57
+ new_state = {
58
+ 'unit_name': target_texts['target_unit_name'],
59
+ 'analogy': target_texts['target_analogy'],
60
+ 'commentary': target_texts['target_commentary']
61
+ }
62
+
63
  # Handle "Infinity" Sentinel
64
+ if "end of knowledge" in new_state['unit_name'].lower():
65
  direction = "larger" if model == successor_model else "smaller"
66
  prefix = "Giga-" if direction == "larger" else "pico-"
67
+ new_state['unit_name'] = f"{prefix}{current_state_dict['unit_name']}"
68
+ new_state['analogy'] = "A procedurally generated unit beyond the AI's known universe."
69
+ new_state['commentary'] = "This represents a step into true infinity, where rules replace learned knowledge."
70
+
71
+ # Return the new state object and the values for the textboxes
72
+ return new_state, new_state['unit_name'], new_state['analogy'], new_state['commentary']
73
 
74
+ # --- WRAPPER FUNCTIONS ---
75
+ # They now take the state dictionary as input and return the new state dictionary
76
+ def go_larger(current_state):
77
  print("\n>>> 'Go Larger' button clicked. Using SUCCESSOR model.")
78
+ return predict_next_state(successor_model, successor_tokenizers, current_state)
79
 
80
+ def go_smaller(current_state):
81
  print("\n>>> 'Go Smaller' button clicked. Using PREDECESSOR model.")
82
+ return predict_next_state(predecessor_model, predecessor_tokenizers, current_state)
83
 
84
+ # --- 3. THE GRADIO USER INTERFACE (RE-ARCHITECTED) ---
85
+ initial_state = {
86
+ "unit_name": "Byte",
87
+ "analogy": "a single character of text, like 'R'",
88
+ "commentary": "From binary choices, a building block is formed, ready to hold a single, recognizable symbol."
89
+ }
90
 
91
with gr.Blocks(theme=gr.themes.Soft(primary_hue="sky")) as demo:
    gr.Markdown("# 🤖 Digital Scale Explorer AI")
    gr.Markdown("An AI trained from scratch to explore the infinite ladder of data sizes. Click the buttons to traverse the universe of data!")

    # Invisible per-session holder for the authoritative app state; the visible
    # textboxes are mere mirrors of it.
    app_state = gr.State(value=initial_state)

    with gr.Row():
        unit_name_out = gr.Textbox(value=initial_state['unit_name'], label="Unit Name", interactive=False)
        analogy_out = gr.Textbox(value=initial_state['analogy'], label="Analogy", lines=4, interactive=False)
        commentary_out = gr.Textbox(value=initial_state['commentary'], label="AI Commentary", lines=3, interactive=False)

    with gr.Row():
        smaller_btn = gr.Button("Go Smaller ⬇️", variant="secondary", size="lg")
        larger_btn = gr.Button("Go Larger ⬆️", variant="primary", size="lg")

    # Both buttons are wired identically: the state dict is the sole input,
    # and the handler returns the new state plus the three display values.
    _outputs = [app_state, unit_name_out, analogy_out, commentary_out]
    larger_btn.click(fn=go_larger, inputs=[app_state], outputs=_outputs)
    smaller_btn.click(fn=go_smaller, inputs=[app_state], outputs=_outputs)
120
 
121
  if __name__ == "__main__":
122
  demo.launch()