Spaces: Running on Zero

Update app.py (Browse files)

app.py CHANGED
@@ -5,7 +5,6 @@ import os
 import logging
 import zipfile
 import importlib
-import sentry_sdk
 import wandb
 from contextlib import redirect_stdout, redirect_stderr
 import spaces

@@ -13,40 +12,17 @@ import spaces
 USE_WANDB = "WANDB_API_KEY" in os.environ
 if USE_WANDB:
     wandb.login(key=os.environ["WANDB_API_KEY"])
-
 else:
     print("Warning: WANDB_API_KEY not set. Skipping wandb logging.")
 
-
-
-import gradio
-import functools
-from sentry_sdk import flush
-
-orig_call_fn = gradio.blocks.Blocks.call_function  # present in all 3.x & 4.x
-
-@functools.wraps(orig_call_fn)
-async def sentry_call_fn(self, *args, **kwargs):
-    try:
-        return await orig_call_fn(self, *args, **kwargs)
-    except Exception as exc:
-        capture_exception(exc)
-        flush(timeout=2)
-        raise
-
-gradio.blocks.Blocks.call_function = sentry_call_fn
-
-
 import gradio as gr
 import pandas as pd
-import os
 import time
 import sys
 from datetime import datetime
 import re
 
 # --- Configuration ---
-#AUTFORGE_SCRIPT_PATH = "auto_forge.py" # Make sure this points to your script
 DEFAULT_MATERIALS_CSV = "default_materials.csv"
 GRADIO_OUTPUT_BASE_DIR = "output"
 os.makedirs(GRADIO_OUTPUT_BASE_DIR, exist_ok=True)
@@ -60,10 +36,6 @@ DISPLAY_COL_MAP = {
 }
 
 def exc_text(exc: BaseException) -> str:
-    """
-    Return the human-readable message of *exc*.
-    Falls back to the class name if the message is empty.
-    """
     txt = str(exc).strip()
     if txt:
         return txt

@@ -72,41 +44,28 @@ def exc_text(exc: BaseException) -> str:
     return exc.__class__.__name__
 
 def ensure_required_cols(df, *, in_display_space):
-    """
-    Return a copy of *df* with every required column present.
-    If *in_display_space* is True we use the display names
-    (Brand, Name, TD, Color (Hex)); otherwise we use the script names.
-    """
     target_cols = (
         DISPLAY_COL_MAP if in_display_space else {k: k for k in REQUIRED_SCRIPT_COLS}
     )
     df_fixed = df.copy()
     for col_script, col_display in target_cols.items():
         if col_display not in df_fixed.columns:
-            # sensible defaults
             if "TD" in col_display:
                 default = 0.0
             elif "Color" in col_display:
                 default = "#000000"
-            elif "Owned" in col_display:
+            elif "Owned" in col_display:
                 default = "false"
             else:
                 default = ""
             df_fixed[col_display] = default
-    # order columns nicely
     return df_fixed[list(target_cols.values())]
 
-
 def rgba_to_hex(col: str) -> str:
-    """
-    Turn 'rgba(r, g, b, a)' or 'rgb(r, g, b)' into '#RRGGBB'.
-    If the input is already a hex code or anything unexpected,
-    return it unchanged.
-    """
     if not isinstance(col, str):
         return col
     col = col.strip()
-    if col.startswith("#"):
+    if col.startswith("#"):
         return col.upper()
 
     m = re.match(
@@ -114,13 +73,12 @@ def rgba_to_hex(col: str) -> str:
         col,
     )
     if not m:
-        return col
+        return col
 
     r, g, b = (int(float(x)) for x in m.groups()[:3])
     return "#{:02X}{:02X}{:02X}".format(r, g, b)
 
 def zip_dir_no_compress(src_dir: str, dest_zip: str) -> str:
-    """Create *dest_zip* from *src_dir* using no compression (ZIP_STORED)."""
     t0 = time.time()
     with zipfile.ZipFile(dest_zip, "w",
                          compression=zipfile.ZIP_STORED,

@@ -128,18 +86,15 @@ def zip_dir_no_compress(src_dir: str, dest_zip: str) -> str:
         for root, _, files in os.walk(src_dir):
             for fname in files:
                 fpath = os.path.join(root, fname)
-                # keep folder structure inside the archive but drop the leading path
                 zf.write(fpath, os.path.relpath(fpath, src_dir))
     print(f"Zipping finished in {time.time() - t0:.1f}s")
     return dest_zip
 
-# --- Helper Functions ---
 def get_script_args_info(exclude_args=None):
     if exclude_args is None:
         exclude_args = []
 
     all_args_info = [
-        # input_image is handled separately in the UI
         {
             "name": "--iterations",
             "type": "number",

@@ -247,7 +202,7 @@ def get_script_args_info(exclude_args=None):
             "min": 0.0,
             "max": 1.0,
             "step": 0.01,
-            "help": "Percentage of increment search for fast pruning.
+            "help": "Percentage of increment search for fast pruning.",
         },
         {
             "name": "--random_seed",
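For reference, a minimal self-contained sketch of the rgba-to-hex conversion used by `rgba_to_hex` above. The regex pattern here is an assumption, since the diff truncates the original `re.match(...)` call:

```python
import re

# Assumed pattern; the actual regex in app.py is not visible in this diff.
_RGBA_RE = re.compile(
    r"rgba?\(\s*([\d.]+)\s*,\s*([\d.]+)\s*,\s*([\d.]+)\s*(?:,\s*([\d.]+)\s*)?\)"
)

def rgba_to_hex(col: str) -> str:
    """Convert 'rgb(r, g, b)' / 'rgba(r, g, b, a)' to '#RRGGBB'; pass anything else through."""
    if not isinstance(col, str):
        return col
    col = col.strip()
    if col.startswith("#"):
        return col.upper()
    m = _RGBA_RE.match(col)
    if not m:
        return col
    r, g, b = (int(float(x)) for x in m.groups()[:3])  # alpha channel is ignored
    return "#{:02X}{:02X}{:02X}".format(r, g, b)

assert rgba_to_hex("rgba(255, 0, 128, 0.5)") == "#FF0080"
assert rgba_to_hex("#abcdef") == "#ABCDEF"
```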
@@ -266,66 +221,80 @@ def get_script_args_info(exclude_args=None):
     ]
     return [arg for arg in all_args_info if arg["name"] not in exclude_args]
 
-
-# Initial filament data
+# initial data that will be used if no CSV exists
 initial_filament_data = {
-    "Brand": ["Generic", "Generic", "Generic","Generic","Generic","Generic"
-    " Name": ["PLA Black", "PLA Grey", "PLA White","PLA Red","PLA Green","PLA Blue"],
-    " TD": [5.0, 5.0, 5.0, 5.0, 5.0
-    " Color": ["#000000", "#808080", "#FFFFFF","#FF0000","#00FF00","#0000FF"],
-    " Owned": ["true", "true", "true", "true", "true", "true"],
+    "Brand": ["Generic", "Generic", "Generic", "Generic", "Generic", "Generic"],
+    " Name": ["PLA Black", "PLA Grey", "PLA White", "PLA Red", "PLA Green", "PLA Blue"],
+    " TD": [5.0, 5.0, 5.0, 5.0, 5.0, 5.0],
+    " Color": ["#000000", "#808080", "#FFFFFF", "#FF0000", "#00FF00", "#0000FF"],
+    " Owned": ["true", "true", "true", "true", "true", "true"],
 }
-initial_df = pd.DataFrame(initial_filament_data)
 
+def normalize_filament_df(df: pd.DataFrame) -> pd.DataFrame:
+    df = df.copy()
+
+    df.columns = [c.strip() for c in df.columns]
+
+    rename_map = {
+        "Name": " Name",
+        "TD": " TD",
+        "Color": " Color",
+        "Owned": " Owned",
+    }
+    for src, dst in rename_map.items():
+        if src in df.columns and dst not in df.columns:
+            df.rename(columns={src: dst}, inplace=True)
+
+    if " TD" in df.columns:
+        df[" TD"] = pd.to_numeric(df[" TD"], errors="coerce").fillna(0.0)
+    else:
+        df[" TD"] = 0.0
+
+    if " Color" in df.columns:
+        df[" Color"] = df[" Color"].astype(str)
+    else:
+        df[" Color"] = "#000000"
+
+    if " Owned" not in df.columns:
+        df[" Owned"] = "false"
+    else:
+        df[" Owned"] = df[" Owned"].astype(str)
+
+    if "Brand" not in df.columns:
+        df["Brand"] = ""
+
+    ordered_cols = ["Brand", " Name", " TD", " Color", " Owned"]
+    df = df[[c for c in ordered_cols if c in df.columns]]
+    return df
+
+# load CSV if present
 if os.path.exists(DEFAULT_MATERIALS_CSV):
     try:
-
-        loaded_df =
-
-        # Make sure the columns we need are present
-        for col in ["Brand", " Name", " TD", " Color"]:
-            if col not in loaded_df.columns:
-                # keep it simple, create empty column
-                loaded_df[col] = "" if col != " TD" else 0.0
-
-        # Cast to the types we expect
-        loaded_df = loaded_df[["Brand", " Name", " TD", " Color"]].astype(
-            {" TD": float, " Color": str}
-        )
+        loaded_df = pd.read_csv(DEFAULT_MATERIALS_CSV, index_col=False)
+        loaded_df = normalize_filament_df(loaded_df)
 
-        initial_df = loaded_df
+        initial_df = loaded_df.copy()
 
-        # Important part:
-        # also update the in memory initial_filament_data so later fallbacks
-        # do not go back to the hardcoded 6 Generic rows
         initial_filament_data = {
             "Brand": initial_df["Brand"].tolist(),
             " Name": initial_df[" Name"].tolist(),
             " TD": initial_df[" TD"].tolist(),
             " Color": initial_df[" Color"].tolist(),
         }
-
-        # you can add a default here
-        if " Owned" not in initial_df.columns:
-            # fill with false by default
-            initial_filament_data[" Owned"] = ["false"] * len(initial_df)
-        else:
+        if " Owned" in initial_df.columns:
             initial_filament_data[" Owned"] = initial_df[" Owned"].astype(str).tolist()
-
+        else:
+            initial_filament_data[" Owned"] = ["false"] * len(initial_df)
     except Exception as e:
         print(f"Warning: Could not load {DEFAULT_MATERIALS_CSV}: {e}. Using default.")
         initial_df = pd.DataFrame(initial_filament_data)
 else:
-    # CSV does not exist yet, create it from the hardcoded defaults
     initial_df = pd.DataFrame(initial_filament_data)
     initial_df.to_csv(DEFAULT_MATERIALS_CSV, index=False)
 
-
 def run_autoforge_process(cmd, log_path):
-    """Run AutoForge in-process and stream its console output to *log_path*."""
     from joblib import parallel_backend
-    cli_args = cmd[1:]
+    cli_args = cmd[1:]
     autoforge_main = importlib.import_module("autoforge.__main__")
 
     exit_code = 0
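A quick sketch of what the new `normalize_filament_df` helper accomplishes, using only pandas (illustrative, not part of the commit): it maps plain HueForge-style headers onto the leading-space column names the script expects and fills defaults.

```python
import pandas as pd

# HueForge-style export: un-prefixed headers, TD stored as strings
raw = pd.DataFrame({"Brand": ["Generic"], "Name": ["PLA Black"],
                    "TD": ["5.0"], "Color": ["#000000"]})

df = raw.copy()
df.columns = [c.strip() for c in df.columns]
# map plain headers to the script's leading-space variants
df = df.rename(columns={"Name": " Name", "TD": " TD", "Color": " Color"})
df[" TD"] = pd.to_numeric(df[" TD"], errors="coerce").fillna(0.0)
df[" Owned"] = "false"  # default when the column is absent
print(df.columns.tolist())  # ['Brand', ' Name', ' TD', ' Color', ' Owned']
```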
@@ -333,27 +302,23 @@ def run_autoforge_process(cmd, log_path):
         redirect_stdout(log_f), redirect_stderr(log_f), parallel_backend("threading", n_jobs=-1):
         try:
             sys.argv = ["autoforge"] + cli_args
-            autoforge_main.main()
-        except SystemExit as e:
+            autoforge_main.main()
+        except SystemExit as e:
             exit_code = e.code
+        except Exception as e:
+            log_f.write(f"\nERROR: {e}\n")
+            exit_code = -1
 
     return exit_code
 
-
-# Helper for creating an empty 10-tuple for error returns
 def create_empty_error_outputs(log_message=""):
     return (
-        log_message,
-        None,
-        gr.update(visible=False, interactive=False),
+        log_message,
+        None,
+        gr.update(visible=False, interactive=False),
     )
 
 def load_filaments_from_json_upload(file_obj):
-    """
-    Called when the user picks a .json file and converts it to the
-    script-style DataFrame expected by the rest of the app.
-    """
-    # ── early-out when nothing was chosen ──────────────────────────────
     if file_obj is None:
         current_script_df = filament_df_state.value
         if current_script_df is not None and not current_script_df.empty:
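The in-process runner above relies on a standard pattern: point `sys.argv` at the CLI arguments, redirect stdout and stderr into a log file, and catch `SystemExit`. A self-contained sketch, with a dummy `main()` standing in for `autoforge.__main__.main`:

```python
import sys
from contextlib import redirect_stdout, redirect_stderr

def main():  # stand-in for autoforge.__main__.main
    print("args:", sys.argv[1:])
    raise SystemExit(0)

def run_in_process(cli_args, log_path):
    exit_code = 0
    with open(log_path, "w", encoding="utf-8") as log_f, \
            redirect_stdout(log_f), redirect_stderr(log_f):
        try:
            sys.argv = ["autoforge"] + cli_args
            main()
        except SystemExit as e:
            exit_code = e.code  # argparse errors and normal exits land here
    return exit_code

print(run_in_process(["--iterations", "100"], "run.log"))  # -> 0
```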
@@ -371,37 +336,20 @@ def load_filaments_from_json_upload(file_obj):
             data = data["Filaments"]
 
     df_loaded = pd.DataFrame(data)
-
-    # strip whitespace around every header first
     df_loaded.columns = [c.strip() for c in df_loaded.columns]
 
-    # convert Hue-forge "nice" headers to the script headers that
-    # still carry a leading blank
     rename_map = {
-        "Name":
-        "Transmissivity":
+        "Name": " Name",
+        "Transmissivity": " TD",
         "Color": " Color",
+        "Owned": " Owned",
     }
     df_loaded.rename(
         columns={k: v for k, v in rename_map.items() if k in df_loaded.columns},
         inplace=True,
     )
 
-
-    df_loaded[" TD"] = pd.to_numeric(df_loaded[" TD"], errors="coerce").fillna(
-        0.0
-    )
-
-    # now make sure the usual helpers see exactly the expected headers
-    df_loaded = ensure_required_cols(df_loaded, in_display_space=False)
-
-    expected_cols = ["Brand", " Name", " TD", " Color"]
-    if not all(col in df_loaded.columns for col in expected_cols):
-        gr.Error(
-            f"JSON must contain keys/columns: {', '.join(expected_cols)}. "
-            f"Found: {df_loaded.columns.tolist()}"
-        )
-        return filament_table.value  # keep the table unchanged
+    df_loaded = normalize_filament_df(df_loaded)
 
     filament_df_state.value = df_loaded.copy()
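An illustrative sketch of the JSON ingestion path (the sample payload is an assumption; only the `Filaments` wrapper and the `Name`/`Transmissivity`/`Color`/`Owned` keys are confirmed by the rename map above):

```python
import json
import pandas as pd

payload = ('{"Filaments": [{"Brand": "Generic", "Name": "PLA Black", '
           '"Transmissivity": 5.0, "Color": "#000000", "Owned": true}]}')

data = json.loads(payload)
if isinstance(data, dict) and "Filaments" in data:
    data = data["Filaments"]  # unwrap the HueForge export envelope

df = pd.DataFrame(data)
df.columns = [c.strip() for c in df.columns]
df = df.rename(columns={"Name": " Name", "Transmissivity": " TD",
                        "Color": " Color", "Owned": " Owned"})
print(df.columns.tolist())  # ['Brand', ' Name', ' TD', ' Color', ' Owned']
```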
@@ -411,10 +359,8 @@ def load_filaments_from_json_upload(file_obj):
 
     except Exception as e:
         gr.Error(f"Error loading JSON: {e}")
-        return filament_table.value
-
+        return filament_table.value
 
-# --- Gradio UI Definition ---
 with gr.Blocks(theme=gr.themes.Soft()) as demo:
     gr.Markdown("# [Autoforge](https://github.com/hvoss-techfak/AutoForge) Web UI")
 

@@ -430,7 +376,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
         'If you have Hueforge, you can export your filaments under "Filaments -> Export" in the Hueforge software. Please make sure to select "CSV" instead of "JSON" during the export dialog.'
     )
     gr.Markdown(
-        'If you want to load your personal library of Hueforge filaments, you can also simply paste this path into your explorer address bar: %APPDATA
+        'If you want to load your personal library of Hueforge filaments, you can also simply paste this path into your explorer address bar: %APPDATA%\\HueForge\\Filaments\\ and import your "personal_library.json" using the "Load Filaments Json" button.'
     )
     gr.Markdown(
         'To remove a filament simply right-click on any of the fields and select "Delete Row"'

@@ -443,10 +389,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             load_csv_button = gr.UploadButton(
                 "Load Filaments CSV", file_types=[".csv"]
             )
-            load_json_button = gr.UploadButton(
+            load_json_button = gr.UploadButton(
                 "Load Filaments JSON", file_types=[".json"]
            )
             save_csv_button = gr.Button("Save Current Filaments to CSV")
+
         filament_table = gr.DataFrame(
             value=ensure_required_cols(
                 initial_df.copy().rename(

@@ -459,6 +406,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             interactive=True,
             label="Filaments",
         )
+
         gr.Markdown("## Add New Filament")
         with gr.Row():
             new_brand = gr.Textbox(label="Brand")

@@ -479,8 +427,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 
     def update_filament_df_state_from_table(display_df):
         display_df = ensure_required_cols(display_df, in_display_space=True)
-
-        # make sure every colour is hex
         if "Color (Hex)" in display_df.columns:
             display_df["Color (Hex)"] = display_df["Color (Hex)"].apply(
                 rgba_to_hex
@@ -497,7 +443,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             gr.Warning("Brand and Name cannot be empty.")
             return current_display_df
 
-        color_hex = rgba_to_hex(color_hex)
+        color_hex = rgba_to_hex(color_hex)
 
         new_row = pd.DataFrame(
             [{"Brand": brand, "Name": name, "TD": td, "Color (Hex)": color_hex}]

@@ -523,26 +469,18 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 columns={" Name": "Name", " TD": "TD", " Color": "Color (Hex)"}
             )
         try:
-            loaded_script_df = pd.read_csv(file_obj.name)
-            loaded_script_df =
-
-            )
+            loaded_script_df = pd.read_csv(file_obj.name, index_col=False)
+            loaded_script_df = normalize_filament_df(loaded_script_df)
+
             expected_cols = ["Brand", " Name", " TD", " Color"]
-            if not all(
-                col in loaded_script_df.columns for col in expected_cols
-            ):
+            if not all(col in loaded_script_df.columns for col in expected_cols):
                 gr.Error(
                     f"CSV must contain columns: {', '.join(expected_cols)}. Found: {loaded_script_df.columns.tolist()}"
                 )
-                capture_exception(
-                    Exception(
-                        f"CSV must contain columns: {', '.join(expected_cols)}. Found: {loaded_script_df.columns.tolist()}"
-                    )
-                )
             current_script_df = filament_df_state.value
             if (
-
-
+                current_script_df is not None
+                and not current_script_df.empty
             ):
                 return current_script_df.rename(
                     columns={

@@ -564,7 +502,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 )
         except Exception as e:
             gr.Error(f"Error loading CSV: {e}")
-            capture_exception(e)
             current_script_df = filament_df_state.value
             if current_script_df is not None and not current_script_df.empty:
                 return current_script_df.rename(

@@ -580,8 +517,8 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 
     def save_filaments_to_file_for_download(current_script_df_from_state):
         if (
-
-
+            current_script_df_from_state is None
+            or current_script_df_from_state.empty
         ):
             gr.Warning("Filament table is empty. Nothing to save.")
             return None

@@ -591,7 +528,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             gr.Error(
                 f"Cannot save. DataFrame missing required script columns. Expected: {required_cols}. Found: {df_to_save.columns.tolist()}"
             )
-            capture_exception(Exception(f"Missing columns: {df_to_save.columns.tolist()}"))
             return None
         temp_dir = os.path.join(GRADIO_OUTPUT_BASE_DIR, "_temp_downloads")
         os.makedirs(temp_dir, exist_ok=True)

@@ -609,7 +545,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 visible=True,
             )
         except Exception as e:
-            capture_exception(e)
             gr.Error(f"Error saving CSV for download: {e}")
             return None
 
@@ -646,22 +581,15 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
         accordion_params_ordered_names = []
 
         gr.Markdown(
-            'Here you can upload an image, adjust the parameters and run the Autoforge process. The filaments from the "Filament Management" Tab are automatically used.
-        )
-        gr.Markdown(
-            'If you want to limit the number of colors or color swaps you can find the option under the "Autoforge Parameters" as "pruning_max_colors" and "pruning_max_swaps"'
-        )
-        gr.Markdown(
-            'Please note that huggingface enforces a maximum execution time of two minutes. Depending on your configuration (especially iteration count) it is possible to exceed this time limit. In that case you will see a "GPU Task aborted" error or simply "Error".'
-            ' If you need more time, take a look at the [Autoforge Github Page](https://github.com/hvoss-techfak/AutoForge) to see how you can run the program locally, or pull the docker container for this project (upper right corner -> three dots -> "run locally")'
+            'Here you can upload an image, adjust the parameters and run the Autoforge process. The filaments from the "Filament Management" Tab are automatically used.'
         )
 
         with gr.Row():
             with gr.Column(scale=1):
                 gr.Markdown("### Input Image (Required)")
-                input_image_component = gr.Image(
-                    type="pil",
-                    image_mode="RGBA",
+                input_image_component = gr.Image(
+                    type="pil",
+                    image_mode="RGBA",
                     label="Upload Image",
                     sources=["upload"],
                     interactive=True,

@@ -729,7 +657,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                     elem_id="run_button_full_width",
                 )
 
-
                 progress_output = gr.Textbox(
                     label="Console Output",
                     lines=15,

@@ -745,18 +672,15 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                     visible=False,
                 )
 
-    # --- Backend Function for Running the Script ---
     @spaces.GPU(duration=150)
     def execute_autoforge_script(
-
+        current_filaments_df_state_val, input_image, *accordion_param_values
     ):
 
         log_output = []
 
-        # 0. Validate Inputs
         if input_image is None:
             gr.Error("Input Image is required! Please upload an image.")
-            capture_exception(Exception("Input Image is required!"))
             return create_empty_error_outputs("Error: Input Image is required!")
 
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + str(uuid.uuid4())
@@ -764,15 +688,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
         os.makedirs(run_output_dir_val, exist_ok=True)
         current_run_output_dir.value = run_output_dir_val
 
-        # 1. Save current filaments
         if (
-
-
+            current_filaments_df_state_val is None
+            or current_filaments_df_state_val.empty
         ):
             gr.Error("Filament table is empty. Please add filaments.")
-            capture_exception(
-                Exception("Filament table is empty. Please add filaments.")
-            )
             return create_empty_error_outputs("Error: Filament table is empty.")
 
         temp_filament_csv = os.path.join(run_output_dir_val, "materials.csv")

@@ -784,34 +704,26 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 f"Error: Filament data is missing columns: {', '.join(missing_cols)}."
             )
             gr.Error(err_msg)
-            capture_exception(
-                Exception(f"Filament data is missing columns: {', '.join(missing_cols)}.")
-            )
             return create_empty_error_outputs(err_msg)
         try:
             df_to_save.to_csv(temp_filament_csv, index=False)
         except Exception as e:
-            capture_exception(e)
             err_msg = f"Error saving temporary filament CSV: {e}"
             gr.Error(err_msg)
             return create_empty_error_outputs(err_msg)
 
-
-        python_executable = sys.executable or "python"
-        command = ["autoforge",]
+        command = ["autoforge"]
         command.extend(["--csv_file", temp_filament_csv])
         command.extend(["--output_folder", run_output_dir_val])
-        command.extend(["--disable_visualization_for_gradio","1"])
+        command.extend(["--disable_visualization_for_gradio", "1"])
 
         try:
-            # decide where to store the image we pass to Autoforge
             script_input_image_path = os.path.join(
                 run_output_dir_val, "input_image.png"
             )
             input_image.save(script_input_image_path, format="PNG")
             command.extend(["--input_image", script_input_image_path])
         except Exception as e:
-            capture_exception(e)
             err_msg = f"Error handling input image: {e}"
             gr.Error(err_msg)
             return create_empty_error_outputs(err_msg)
@@ -821,11 +733,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             if arg_widget_val is None or arg_widget_val == "":
                 arg_info_list = [
                     item for item in get_script_args_info() if item["name"] == arg_name
-                ]
+                ]
                 if (
-
-
-
+                    arg_info_list
+                    and arg_info_list[0]["type"] == "checkbox"
+                    and arg_widget_val is False
                 ):
                     continue
                 else:

@@ -840,8 +752,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             else:
                 command.extend([arg_name, str(arg_widget_val)])
 
-
-        # 3. Run script
         log_output = [
             "Starting Autoforge process at ",
             f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n",
@@ -849,45 +759,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             f"Command: {' '.join(command)}\n\n",
         ]
 
-        yield create_empty_error_outputs(log_output)
-
-
-        def _maybe_new_preview():
-            """
-            If vis_temp.png has a newer mtime than last time, copy it to a
-            stamped name (to defeat browser cache) and return that path.
-            Otherwise return gr.update() so the image stays as-is.
-            """
-            from gradio import update  # local import for clarity
-
-            nonlocal preview_mtime
-
-            src = os.path.join(run_output_dir_val, "vis_temp.png")
-            if not os.path.exists(src):
-                return update()  # nothing new, keep old
-
-            mtime = os.path.getmtime(src)
-            if mtime <= preview_mtime:  # unchanged
-                return update()  # → no UI update
-
-            return src  # → refresh image
-
-        # ---- run Autoforge on the GPU in a helper thread ------------------
+        yield create_empty_error_outputs("".join(log_output))
 
         log_file = os.path.join(run_output_dir_val, "autoforge_live.log")
         open(log_file, "w", encoding="utf-8").close()
 
-        cmd_str = " ".join(command)
-        sentry_sdk.capture_event(
-            {
-                "message": "Autoforge process started",
-                "level": "info",
-                "fingerprint": ["autoforge-process-start"],  # every start groups here
-                "extra": {"command": cmd_str},  # still searchable
-            }
-        )
-
-        # simple thread that just calls the GPU helper and stores the exit code
         import threading
 
         class Worker(threading.Thread):
@@ -895,26 +771,19 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 super().__init__(daemon=True)
                 self.cmd, self.log_path = cmd, log_path
                 self.returncode = None
+                self.exc = None
 
             def run(self):
                 try:
                     self.returncode = run_autoforge_process(self.cmd, self.log_path)
                 except Exception as e:
-                    exc_str = exc_text(e)
                     self.exc = e
-                    capture_exception(e)  # still goes to Sentry
-
-                    # make the error visible in the UI console
                     with open(self.log_path, "a", encoding="utf-8") as lf:
                         lf.write(
-                            "\nERROR: {}. This usually means
-
+                            "\nERROR: {}. This usually means there was no GPU or the process took too long.\n".format(
+                                exc_text(e)
+                            )
                         )
-                    gr.Error(
-                        "ERROR: {}. This usually means that you, your IP adress or the the space has no free GPU "
-                        "minutes left, or the process took too long due to too many filaments or changed parameters. Please clone the docker container, run it locally or wait for a bit.\n".format(exc_str)
-                    )
-                    # a non-zero code tells the outer loop something went wrong
                     self.returncode = -1
 
         try:
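The `Worker` pattern above, reduced to a self-contained sketch: a daemon thread runs a callable, stores its return code, and keeps any exception around for the UI loop to inspect after `join()`:

```python
import threading

class Worker(threading.Thread):
    def __init__(self, fn, *args):
        super().__init__(daemon=True)
        self.fn, self.args = fn, args
        self.returncode = None
        self.exc = None

    def run(self):
        try:
            self.returncode = self.fn(*self.args)
        except Exception as e:
            self.exc = e        # surfaced later on the main thread
            self.returncode = -1

w = Worker(lambda x: x * 2, 21)
w.start()
w.join()
print(w.returncode, w.exc)  # 42 None
```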
@@ -923,97 +792,70 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 
             preview_mtime = 0
             last_push = 0
-            file_pos = 0
+            file_pos = 0
+
+            def _maybe_new_preview():
+                nonlocal preview_mtime
+                src = os.path.join(run_output_dir_val, "vis_temp.png")
+                if not os.path.exists(src):
+                    return gr.update()
+                mtime = os.path.getmtime(src)
+                if mtime <= preview_mtime:
+                    return gr.update()
+                preview_mtime = mtime
+                return src
 
             while worker.is_alive() or file_pos < os.path.getsize(log_file):
-                # read any new console text
                 with open(log_file, "r", encoding="utf-8") as lf:
                     lf.seek(file_pos)
                     new_txt = lf.read()
                     file_pos = lf.tell()
-                log_output
+                log_output.append(new_txt)
 
                 now = time.time()
-                if now - last_push >= 1.0:
+                if now - last_push >= 1.0:
                     current_preview = _maybe_new_preview()
                     yield (
                         "".join(log_output),
                         current_preview,
-                        gr.update(),
+                        gr.update(),
                     )
                     last_push = now
 
                 time.sleep(0.05)
 
-            worker.join()
+            worker.join()
         except RuntimeError as e:
-
-
-            gr.Error(str(e))  # <-- this is the toast
-            capture_exception(e)
-
+            log_output.append(repr(e))
+            gr.Error(str(e))
             with open(log_file, "r", encoding="utf-8") as lf:
                 lf.seek(file_pos)
                 new_txt = lf.read()
                 file_pos = lf.tell()
-            log_output
+            log_output.append(new_txt)
             yield (
                 "".join(log_output),
-
-                gr.update(),
+                gr.update(),
+                gr.update(),
             )
             return create_empty_error_outputs(str(e))
 
         if getattr(worker, "exc", None) is not None:
-            # worker.exc will be the ZeroGPU / scheduler error
             err_msg = f"GPU run failed: {worker.exc}"
-            log_output
-            gr.Error(err_msg)  # toast
-            yield (  # push the message into the textbox
-                "".join(log_output),
-                _maybe_new_preview(),
-                gr.update(),
-            )
-            return  # stop the coroutine cleanly
-
-        # If the GPU scheduler threw, we already wrote the text into the log.
-        # Just read the tail once more so it reaches the UI textbox.
-        with open(log_file, "r", encoding="utf-8") as lf:
-            lf.seek(file_pos)
-            log_output += lf.read()
-
-        return_code = worker.returncode
-
-        try:
-            sentry_sdk.add_attachment(
-                path=log_file,
-                filename="autoforge.log",
-                content_type="text/plain",
-            )
-        except Exception as e:
-            capture_exception(e)
-
-        if worker.returncode != 0:
-            err_msg = (
-                f"Autoforge exited with code {worker.returncode}\n"
-                "See the console output above for details."
-            )
-            log_output += f"\n{err_msg}\n"
+            log_output.append(f"\n{err_msg}\n")
             gr.Error(err_msg)
             yield (
                 "".join(log_output),
-
+                gr.update(),
                 gr.update(),
             )
             return
-        log_output += (
-            "\nAutoforge process completed successfully!"
-            if return_code == 0
-            else f"\nAutoforge process failed with exit code {return_code}."
-        )
-        log_str = " ".join(log_output)
 
+        with open(log_file, "r", encoding="utf-8") as lf:
+            lf.seek(file_pos)
+            log_output.append(lf.read())
 
+        return_code = worker.returncode
 
         files_to_offer = [
             p
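The streaming loop above tails the log file incrementally with `seek`/`tell`, so each pass reads only what was appended since the last one. A reduced sketch of the technique:

```python
def tail_once(path, pos):
    """Read whatever was appended since *pos*; return (new_text, new_pos)."""
    with open(path, "r", encoding="utf-8") as f:
        f.seek(pos)
        new_txt = f.read()
        return new_txt, f.tell()

open("demo.log", "w").close()
pos = 0
with open("demo.log", "a") as f:
    f.write("line 1\n")
chunk, pos = tail_once("demo.log", pos)
print(repr(chunk))  # 'line 1\n'
```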
@@ -1028,31 +870,34 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
         png_path = os.path.join(run_output_dir_val, "final_model.png")
         out_png = png_path if os.path.exists(png_path) else None
 
-        if
-
+        if return_code != 0:
+            err_msg = (
+                f"Autoforge exited with code {return_code}\n"
+                "See the console output above for details."
+            )
+            log_output.append(f"\n{err_msg}\n")
+            gr.Error(err_msg)
+            yield (
+                "".join(log_output),
+                out_png if out_png else gr.update(),
+                gr.update(),
+            )
+            return
+
+        log_output.append("\nAutoforge process completed successfully!")
 
         zip_path = None
         if files_to_offer:
             zip_path = os.path.join(run_output_dir_val, "autoforge_results.zip")
-            log_output
+            log_output.append(f"\nZipping results to {os.path.basename(zip_path)}...")
             try:
                 with zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_STORED) as zf:
                     for f in files_to_offer:
                         zf.write(f, os.path.basename(f))
-            log_output
+                log_output.append(" done.")
             except Exception as e:
-
-
-                zip_path = None  # Don't offer a broken zip
-
-        sentry_sdk.capture_event(  # moved inside the same scope
-            {
-                "message": "Autoforge process finished",
-                "level": "info",
-                "fingerprint": ["autoforge-process-finished"],
-                "extra": {"log": log_str},
-            }
-        )
+                log_output.append(f"\nError creating zip file: {e}")
+                zip_path = None
 
         if USE_WANDB:
             run = None
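Result packaging uses an uncompressed archive (ZIP_STORED), so large already-compressed outputs zip quickly, and files are written under their basenames, flattening the directory tree. A sketch (the file path is hypothetical):

```python
import os
import zipfile

files_to_offer = ["output/final_model.png"]  # hypothetical result path
os.makedirs("output", exist_ok=True)
open(files_to_offer[0], "wb").close()  # stand-in for a real result file

with zipfile.ZipFile("results.zip", "w", compression=zipfile.ZIP_STORED) as zf:
    for f in files_to_offer:
        zf.write(f, os.path.basename(f))  # store flat, no folder structure

print(zipfile.ZipFile("results.zip").namelist())  # ['final_model.png']
```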
@@ -1063,38 +908,33 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 notes="Autoforge Web UI run",
                 tags=["autoforge", "gradio"],
             )
-            wlogs= {"input_image": wandb.Image(script_input_image_path)
+            wlogs = {"input_image": wandb.Image(script_input_image_path)}
             if out_png:
                 wlogs["output_image"] = wandb.Image(out_png)
-
             material_csv = pd.read_csv(temp_filament_csv)
             table = wandb.Table(dataframe=material_csv)
             wlogs["materials"] = table
-            #log log_output as pandas table
             from wandb import Html
             log_text = "".join(log_output).replace("\r", "\n")
 
             def clean_log_strict(text: str) -> str:
-                # Keep only printable characters + newline + tab
                 allowed = set(string.printable) | {"\n", "\t"}
                 return "".join(ch for ch in text if ch in allowed)
 
             log_text_cleaned = clean_log_strict(log_text)
             wlogs["log"] = Html(f"<pre>{log_text_cleaned}</pre>")
 
-
             wandb.log(wlogs)
         except Exception as e:
-            #we don't want wandb errors logged in sentry
             print(e)
         finally:
             if run is not None:
                 run.finish()
 
         yield (
-            "".join(log_output),
-            out_png,
-            gr.update(
+            "".join(log_output),
+            out_png,
+            gr.update(
                 value=zip_path,
                 visible=bool(zip_path),
                 interactive=bool(zip_path),
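The wandb branch follows the usual init/log/finish lifecycle, with failures swallowed so logging never breaks the app. A minimal sketch, assuming WANDB_API_KEY is set and using a placeholder project name:

```python
import pandas as pd
import wandb

run = None
try:
    run = wandb.init(project="autoforge",  # placeholder project name
                     notes="Autoforge Web UI run", tags=["autoforge", "gradio"])
    wlogs = {
        "materials": wandb.Table(dataframe=pd.DataFrame({"Brand": ["Generic"]})),
        "log": wandb.Html("<pre>console output here</pre>"),
    }
    wandb.log(wlogs)
except Exception as e:
    print(e)  # wandb failures should not break the app
finally:
    if run is not None:
        run.finish()
```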
@@ -1107,19 +947,19 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     run_outputs = [
         progress_output,
         final_image_preview,
-        download_results,
+        download_results,
     ]
 
     run_button.click(execute_autoforge_script, inputs=run_inputs, outputs=run_outputs)
 
     css = """ #run_button_full_width { width: 100%; } """
-if __name__ == "__main__":
+
+if __name__ == "__main__":
     if not os.path.exists(DEFAULT_MATERIALS_CSV):
         print(f"Creating default filament file: {DEFAULT_MATERIALS_CSV}")
         try:
             initial_df.to_csv(DEFAULT_MATERIALS_CSV, index=False)
         except Exception as e:
             print(f"Could not write default {DEFAULT_MATERIALS_CSV}: {e}")
-    print("To run the UI, execute: python app.py")
+    print("To run the UI, execute: python app.py")
     demo.queue(default_concurrency_limit=1).launch(share=False)