szili2011 commited on
Commit
0a68069
·
verified ·
1 Parent(s): 2081e9d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -33
app.py CHANGED
@@ -1,4 +1,4 @@
1
- # app.py (Corrected Version)
2
  import gradio as gr
3
  import tensorflow as tf
4
  import pickle
@@ -6,6 +6,7 @@ import numpy as np
6
 
7
  # --- 1. CONFIGURATION & MODEL LOADING ---
8
  MAX_SEQ_LENGTH = 30
 
9
  print("Loading models and tokenizers...")
10
  try:
11
  successor_model = tf.keras.models.load_model('successor_model.h5')
@@ -21,39 +22,41 @@ except Exception as e:
21
  print(f"FATAL ERROR loading files: {e}")
22
  successor_model, predecessor_model = None, None
23
 
24
- # --- 2. THE CORE PREDICTION LOGIC ---
25
- def predict_next_state(direction, current_unit, current_analogy, current_commentary):
26
- if not all([successor_model, predecessor_model]):
27
- return "Error: Models are not loaded.", "Please check the server logs on Hugging Face.", "---"
 
28
 
29
- model = successor_model if direction == "larger" else predecessor_model
30
- tokenizers = successor_tokenizers if direction == "larger" else predecessor_tokenizers
31
-
32
- input_data = {
33
- 'current_unit_name': [current_unit],
34
- 'current_analogy': [current_analogy],
35
- 'current_commentary': [current_commentary]
36
- }
37
-
38
  processed_input = {}
39
  for col, text_list in input_data.items():
40
  sequences = tokenizers[col].texts_to_sequences(text_list)
41
  padded_sequences = tf.keras.preprocessing.sequence.pad_sequences(sequences, maxlen=MAX_SEQ_LENGTH, padding='post')
42
  processed_input[col] = padded_sequences
43
 
 
44
  predictions = model.predict(processed_input)
45
 
 
46
  target_texts = {}
47
  output_cols = ['target_unit_name', 'target_analogy', 'target_commentary']
48
-
49
  for i, col in enumerate(output_cols):
50
  pred_indices = np.argmax(predictions[i], axis=-1)
51
  predicted_sequence = tokenizers[col].sequences_to_texts(pred_indices)[0]
52
- # More robust cleaning
53
  clean_text = ' '.join([word for word in predicted_sequence.split() if word not in ['<oov>', 'end']])
54
  target_texts[col] = clean_text.strip()
55
 
 
 
 
 
 
 
 
56
  if "end of knowledge" in target_texts['target_unit_name'].lower():
 
57
  prefix = "Giga-" if direction == "larger" else "pico-"
58
  new_unit = f"{prefix}{current_unit}"
59
  new_analogy = "A procedurally generated unit beyond the AI's known universe."
@@ -62,42 +65,35 @@ def predict_next_state(direction, current_unit, current_analogy, current_comment
62
  else:
63
  return target_texts['target_unit_name'], target_texts['target_analogy'], target_texts['target_commentary']
64
 
65
- # Wrapper functions for the buttons
 
66
  def go_larger(unit, analogy, commentary):
67
- return predict_next_state("larger", unit, analogy, commentary)
 
68
 
69
  def go_smaller(unit, analogy, commentary):
70
- return predict_next_state("smaller", unit, analogy, commentary)
 
71
 
72
- # --- 3. THE GRADIO USER INTERFACE ---
73
  initial_unit = "Byte"
74
  initial_analogy = "a single character of text, like 'R'"
75
  initial_commentary = "From binary choices, a building block is formed, ready to hold a single, recognizable symbol."
76
 
77
  with gr.Blocks(theme=gr.themes.Soft(primary_hue="sky")) as demo:
78
  gr.Markdown("# 🤖 Digital Scale Explorer AI")
 
79
  gr.Markdown("An AI trained from scratch to explore the infinite ladder of data sizes. Click the buttons to traverse the universe of data!")
80
-
81
  with gr.Row():
82
  unit_name_out = gr.Textbox(value=initial_unit, label="Unit Name", interactive=False)
83
  analogy_out = gr.Textbox(value=initial_analogy, label="Analogy", lines=4, interactive=False)
84
  commentary_out = gr.Textbox(value=initial_commentary, label="AI Commentary", lines=3, interactive=False)
85
-
86
  with gr.Row():
87
  smaller_btn = gr.Button("Go Smaller ⬇️", variant="secondary", size="lg")
88
  larger_btn = gr.Button("Go Larger ⬆️", variant="primary", size="lg")
89
 
90
- larger_btn.click(
91
- fn=go_larger,
92
- inputs=[unit_name_out, analogy_out, commentary_out],
93
- outputs=[unit_name_out, analogy_out, commentary_out]
94
- )
95
-
96
- smaller_btn.click(
97
- fn=go_smaller, # Corrected from go_larger to go_smaller
98
- inputs=[unit_name_out, analogy_out, commentary_out],
99
- outputs=[unit_name_out, analogy_out, commentary_out]
100
- )
101
 
102
  if __name__ == "__main__":
103
  demo.launch()
 
1
+ # app.py (Hardened and Debuggable Version)
2
  import gradio as gr
3
  import tensorflow as tf
4
  import pickle
 
6
 
7
  # --- 1. CONFIGURATION & MODEL LOADING ---
8
  MAX_SEQ_LENGTH = 30
9
+ print("--- App Starting Up ---")
10
  print("Loading models and tokenizers...")
11
  try:
12
  successor_model = tf.keras.models.load_model('successor_model.h5')
 
22
  print(f"FATAL ERROR loading files: {e}")
23
  successor_model, predecessor_model = None, None
24
 
25
+ # --- 2. THE CORE PREDICTION LOGIC (MODIFIED) ---
26
+ # This function now receives the actual model and tokenizer objects
27
+ def predict_next_state(model, tokenizers, current_unit, current_analogy, current_commentary):
28
+ if not model or not tokenizers:
29
+ return "Error: A required model or tokenizer is not loaded.", "Check server logs.", "---"
30
 
31
+ # Prepare input data
32
+ input_data = {'current_unit_name': [current_unit], 'current_analogy': [current_analogy], 'current_commentary': [current_commentary]}
 
 
 
 
 
 
 
33
  processed_input = {}
34
  for col, text_list in input_data.items():
35
  sequences = tokenizers[col].texts_to_sequences(text_list)
36
  padded_sequences = tf.keras.preprocessing.sequence.pad_sequences(sequences, maxlen=MAX_SEQ_LENGTH, padding='post')
37
  processed_input[col] = padded_sequences
38
 
39
+ # Get AI prediction
40
  predictions = model.predict(processed_input)
41
 
42
+ # Decode prediction back to text
43
  target_texts = {}
44
  output_cols = ['target_unit_name', 'target_analogy', 'target_commentary']
 
45
  for i, col in enumerate(output_cols):
46
  pred_indices = np.argmax(predictions[i], axis=-1)
47
  predicted_sequence = tokenizers[col].sequences_to_texts(pred_indices)[0]
 
48
  clean_text = ' '.join([word for word in predicted_sequence.split() if word not in ['<oov>', 'end']])
49
  target_texts[col] = clean_text.strip()
50
 
51
+ # *** DEBUGGING PRINT ***
52
+ print(f"--- PREDICTION DECODED ---")
53
+ print(f"Decoded Unit Name: {target_texts['target_unit_name']}")
54
+ print(f"Decoded Analogy: {target_texts['target_analogy']}")
55
+ print("--------------------------")
56
+
57
+ # Handle "Infinity" Sentinel
58
  if "end of knowledge" in target_texts['target_unit_name'].lower():
59
+ direction = "larger" if model == successor_model else "smaller"
60
  prefix = "Giga-" if direction == "larger" else "pico-"
61
  new_unit = f"{prefix}{current_unit}"
62
  new_analogy = "A procedurally generated unit beyond the AI's known universe."
 
65
  else:
66
  return target_texts['target_unit_name'], target_texts['target_analogy'], target_texts['target_commentary']
67
 
68
+ # --- WRAPPER FUNCTIONS (MODIFIED) ---
69
+ # These wrappers now pass the correct objects explicitly
70
def go_larger(unit, analogy, commentary):
    """Button handler: step UP the scale using the successor model/tokenizers."""
    print("\n>>> 'Go Larger' button clicked. Using SUCCESSOR model.")
    next_state = predict_next_state(
        successor_model, successor_tokenizers, unit, analogy, commentary
    )
    return next_state
73
 
74
def go_smaller(unit, analogy, commentary):
    """Button handler: step DOWN the scale using the predecessor model/tokenizers."""
    print("\n>>> 'Go Smaller' button clicked. Using PREDECESSOR model.")
    next_state = predict_next_state(
        predecessor_model, predecessor_tokenizers, unit, analogy, commentary
    )
    return next_state
77
 
78
# --- 3. THE GRADIO USER INTERFACE (No changes needed here) ---
# Seed values shown before the first prediction is requested.
START_UNIT = "Byte"
START_ANALOGY = "a single character of text, like 'R'"
START_COMMENTARY = "From binary choices, a building block is formed, ready to hold a single, recognizable symbol."

with gr.Blocks(theme=gr.themes.Soft(primary_hue="sky")) as demo:
    gr.Markdown("# 🤖 Digital Scale Explorer AI")
    # ... (the rest of the UI code is identical) ...
    gr.Markdown("An AI trained from scratch to explore the infinite ladder of data sizes. Click the buttons to traverse the universe of data!")

    # NOTE(review): the paste lost the original indentation — assuming all three
    # textboxes sit inside this first Row; confirm against the rendered layout.
    with gr.Row():
        unit_name_out = gr.Textbox(value=START_UNIT, label="Unit Name", interactive=False)
        analogy_out = gr.Textbox(value=START_ANALOGY, label="Analogy", lines=4, interactive=False)
        commentary_out = gr.Textbox(value=START_COMMENTARY, label="AI Commentary", lines=3, interactive=False)

    with gr.Row():
        smaller_btn = gr.Button("Go Smaller ⬇️", variant="secondary", size="lg")
        larger_btn = gr.Button("Go Larger ⬆️", variant="primary", size="lg")

    # Both buttons read from and write back to the same three state textboxes.
    state_boxes = [unit_name_out, analogy_out, commentary_out]
    larger_btn.click(fn=go_larger, inputs=state_boxes, outputs=state_boxes)
    smaller_btn.click(fn=go_smaller, inputs=state_boxes, outputs=state_boxes)
 
 
 
 
 
 
 
 
 
97
 
98
# Start the Gradio server only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()