Update src/app.py
Fix columns, hide graph by default
- src/app.py +4 -2
src/app.py CHANGED

@@ -186,7 +186,8 @@ col1, col2 = st.columns([1,1.3])
 if gpu_vendor == "Apple":
     col.warning("""For M1/M2 Apple chips, PyTorch uses [Metal Performance Shaders (MPS)](https://huggingface.co/docs/accelerate/usage_guides/mps) as backend.\\
 Remember that Apple M1/M2 chips share memory between CPU and GPU.""", icon="⚠️")
-
+_, col, _ = st.columns([1,3,1])
+with col:
 st.write(f"#### [{model_name}](https://huggingface.co/{model_name}) ({custom_ceil(memory_table.iloc[3,0],1):.1f}B)")

 dtypes = memory_table.columns.tolist()[::-1]

@@ -199,7 +200,8 @@ with col1:
 info = _memory_table[_memory_table['dtype'] == dtype].set_index('Variable')
 show_gpu_info(info, lora_pct, gpu_vendor)
 st.write(memory_table.iloc[[0, 1, 2, 4]])
-
+_, col, _ = st.columns([1,3,1])
+with col.expander("Graph", expanded=False):
 extra = ""
 if gpu_vendor == "Apple":
     st.warning("This graph is irrelevant for M1/M2 chips as they can't run in parallel.", icon="⚠️")
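For reference, the change relies on two standard Streamlit layout idioms: a weighted 1:3:1 column split, with content written into the middle column so it renders centered, and an expander created with expanded=False so the graph stays collapsed until the reader opens it. The sketch below is a minimal, self-contained illustration of those two idioms, not code from the app; the DataFrame and chart are placeholder data.

# Minimal sketch of the layout patterns used in the diff above (placeholder data, not app code).
import numpy as np
import pandas as pd
import streamlit as st

# Center content: a 1:3:1 split makes the middle column the widest,
# with the outer columns acting as margins.
_, col, _ = st.columns([1, 3, 1])
with col:
    st.write("#### Centered heading")

# Hide the graph by default: an expander with expanded=False keeps the
# chart collapsed until the user clicks it open.
with col.expander("Graph", expanded=False):
    data = pd.DataFrame(np.random.randn(20, 2), columns=["a", "b"])
    st.line_chart(data)

Note that Streamlit still executes the code inside a collapsed expander on every rerun; expanded=False only hides the rendered output, it does not defer the computation.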