Update app.py
app.py
CHANGED
@@ -93,18 +93,18 @@ class ModelConverter:
         extracted_folder.rename(self.config.repo_path)
 
     def convert_model(self, input_model_id: str) -> Tuple[bool, Optional[str]]:
-        """Convert the model to ONNX format."""
+        """Convert the model to ONNX format, always exporting attention maps."""
         try:
+            cmd = [
+                sys.executable,
+                "-m", "scripts.convert",
+                "--quantize",
+                "--trust_remote_code",
+                "--model_id", input_model_id,
+                "--output_attentions",
+            ]
             result = subprocess.run(
-                [
-                    sys.executable,
-                    "-m",
-                    "scripts.convert",
-                    "--quantize",
-                    "--trust_remote_code",
-                    "--model_id",
-                    input_model_id,
-                ],
+                cmd,
                 cwd=self.config.repo_path,
                 capture_output=True,
                 text=True,
@@ -140,7 +140,6 @@ class ModelConverter:
             return str(e)
         finally:
             import shutil
-
             shutil.rmtree(model_folder_path, ignore_errors=True)
 
     def generate_readme(self, imi: str):
@@ -159,7 +158,7 @@ class ModelConverter:
 
 def main():
     """Main application entry point."""
-    st.write("## Convert a Hugging Face model to ONNX")
+    st.write("## Convert a Hugging Face model to ONNX (with attentions)")
 
     try:
         config = Config.from_env()
@@ -169,27 +168,24 @@ def main():
         input_model_id = st.text_input(
             "Enter the Hugging Face model ID to convert. Example: `EleutherAI/pythia-14m`"
         )
-
         if not input_model_id:
             return
 
         st.text_input(
-
+            "Optional: Your Hugging Face write token. Fill it if you want to upload under your account.",
             type="password",
             key="user_hf_token",
         )
 
         if config.hf_username == input_model_id.split("/")[0]:
             same_repo = st.checkbox(
-                "
+                "Upload ONNX weights to the same repository?"
             )
         else:
             same_repo = False
 
         model_name = input_model_id.split("/")[-1]
-
         output_model_id = f"{config.hf_username}/{model_name}"
-
         if not same_repo:
             output_model_id += "-ONNX"
 
@@ -200,29 +196,27 @@ def main():
             st.link_button(f"Go to {output_model_id}", output_model_url, type="primary")
             return
 
-        st.write(
+        st.write("Destination repository:")
         st.code(output_model_url, language="plaintext")
 
         if not st.button(label="Proceed", type="primary"):
             return
 
-        with st.spinner("Converting model
+        with st.spinner("Converting model (including attention maps)…"):
             success, stderr = converter.convert_model(input_model_id)
             if not success:
                 st.error(f"Conversion failed: {stderr}")
                 return
-
         st.success("Conversion successful!")
         st.code(stderr)
 
-        with st.spinner("Uploading model
+        with st.spinner("Uploading model…"):
             error = converter.upload_model(input_model_id, output_model_id)
             if error:
                 st.error(f"Upload failed: {error}")
                 return
-
         st.success("Upload successful!")
-        st.write("You can now
+        st.write("You can now view the model on Hugging Face:")
         st.link_button(f"Go to {output_model_id}", output_model_url, type="primary")
 
     except Exception as e:
@@ -232,3 +226,4 @@ def main():
 
 if __name__ == "__main__":
     main()
+
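For reference, the updated convert_model() now assembles the converter invocation as a cmd list before handing it to subprocess.run, and adds the --output_attentions flag so attention maps are exported alongside the ONNX weights. Below is a minimal standalone sketch of the equivalent call; it assumes it is run from the checked-out conversion repo (the app's self.config.repo_path, where scripts.convert lives) and uses EleutherAI/pythia-14m, the example model ID from the app's prompt.

# Minimal sketch of the command the updated convert_model() assembles.
# Assumption: the current working directory is the conversion repo
# (self.config.repo_path in the app), so "scripts.convert" is importable.
import subprocess
import sys

input_model_id = "EleutherAI/pythia-14m"  # example model ID from the app's prompt

cmd = [
    sys.executable,
    "-m", "scripts.convert",
    "--quantize",
    "--trust_remote_code",
    "--model_id", input_model_id,
    "--output_attentions",
]

result = subprocess.run(cmd, capture_output=True, text=True)
print("exit code:", result.returncode)
print(result.stderr)  # the app shows stderr on both success and failure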