Update app.py
app.py CHANGED
@@ -1,8 +1,3 @@
-You can easily add that blurb by inserting a `gr.Markdown()` component within the same `gr.Column()` as your `sample_input_slider` and `run_button`. This effectively places it within Gradio's "flexbox" layout, ensuring it's always visible below the slider and button.
-
-Here's your `app.py` code with the blurb added in the correct place. I've also updated the `run_inference` function to explicitly target `torch.device("cpu")` and removed the `@spaces.GPU()` decorator, which aligns with your successful run on ZeroCPU.
-
-```python
 import gradio as gr
 import torch
 from neuralop.models import FNO
@@ -61,7 +56,11 @@ def load_dataset():
     data = torch.load(local_dataset_path, map_location='cpu')
     if isinstance(data, dict) and 'x' in data:
         FULL_DATASET_X = data['x']
-    elif isinstance(data, torch.Tensor):
+    elif isinstance(dYou can easily add that blurb by inserting a `gr.Markdown()` component within the same `gr.Column()` as your `sample_input_slider` and `run_button`. This effectively places it within Gradio's "flexbox" layout, ensuring it's always visible below the slider and button.
+
+Here's your `app.py` code with the blurb added in the correct place. I've also updated the `run_inference` function to explicitly target `torch.device("cpu")` and removed the `@spaces.GPU()` decorator, which aligns with your successful run on ZeroCPU.
+
+```pythonata, torch.Tensor):
         FULL_DATASET_X = data
     else:
         raise ValueError("Unknown dataset format or 'x' key missing.")
@@ -170,6 +169,4 @@ with gr.Blocks() as demo:
     demo.load(load_initial_data_and_predict, inputs=None, outputs=[input_image_plot, output_image_plot])
 
 if __name__ == "__main__":
-    demo.launch()
-
-```
+    demo.launch()
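Note that the second hunk pastes the explanatory prose into the middle of `elif isinstance(data, torch.Tensor):`, which leaves `load_dataset()` with a syntax error. Below is a minimal sketch of what that branch presumably intends once the pasted text is removed; `local_dataset_path` and `FULL_DATASET_X` come from the diff, while the function signature, default path, and the `global` handling are assumptions.

```python
import torch

FULL_DATASET_X = None  # module-level cache, as suggested by the diff

def load_dataset(local_dataset_path: str = "dataset.pt"):  # default path is hypothetical
    """Load the sample inputs onto the CPU and stash them in FULL_DATASET_X."""
    global FULL_DATASET_X
    data = torch.load(local_dataset_path, map_location='cpu')
    if isinstance(data, dict) and 'x' in data:
        FULL_DATASET_X = data['x']
    elif isinstance(data, torch.Tensor):  # the line the stray paste split apart
        FULL_DATASET_X = data
    else:
        raise ValueError("Unknown dataset format or 'x' key missing.")
```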
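The removed blurb describes the layout idea behind the change: a `gr.Markdown()` component placed in the same `gr.Column()` as `sample_input_slider` and `run_button`, so the text always renders directly below them. Here is a minimal sketch of that arrangement; the labels, slider range, and blurb wording are placeholders, with only the component names taken from the original text.

```python
import gradio as gr

with gr.Blocks() as demo:
    with gr.Column():
        # Slider and button first, then the Markdown blurb in the same Column,
        # so it always appears below them in the rendered layout.
        sample_input_slider = gr.Slider(0, 99, step=1, label="Sample index")
        run_button = gr.Button("Run inference")
        gr.Markdown("Pick a sample index and press the button to run the FNO on the CPU.")

if __name__ == "__main__":
    demo.launch()
```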
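The blurb also mentions pointing inference at `torch.device("cpu")` and dropping the `@spaces.GPU()` decorator for a ZeroCPU Space. A rough sketch of a CPU-only `run_inference` under those assumptions follows; the signature, model handling, and tensor shapes are placeholders rather than the app's actual code.

```python
import torch
from neuralop.models import FNO

DEVICE = torch.device("cpu")  # explicit CPU target; no @spaces.GPU() decorator needed on ZeroCPU

def run_inference(model: FNO, sample_x: torch.Tensor) -> torch.Tensor:
    """Run a single forward pass on the CPU and return the prediction."""
    model = model.to(DEVICE).eval()
    with torch.no_grad():
        x = sample_x.to(DEVICE)
        if x.dim() == 3:  # add a batch dimension if the sample is unbatched (assumption)
            x = x.unsqueeze(0)
        prediction = model(x)
    return prediction.squeeze(0).cpu()
```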