badaoui HF Staff committed on
Commit
0a7e495
·
verified ·
1 Parent(s): 7797fbb

Update optimum_neuron_export.py

Browse files
Files changed (1) hide show
  1. optimum_neuron_export.py +50 -17
optimum_neuron_export.py CHANGED
@@ -134,17 +134,16 @@ def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discuss
134
  return discussion
135
  return None
136
 
137
- def export(model_id: str, task_or_pipeline:str, model_type: str, folder: str):
138
-
139
- yield f"📦 Exporting model `{model_id}` for task `{task_or_pipeline}`..."
140
-
141
- if model_type == "diffusers":
142
- model_class = DIFFUSION_PIPELINE_MAPPING.get(task_or_pipeline)
143
-
144
  inputs = get_default_inputs(task_or_pipeline)
145
- yield f"🔧 Using default inputs: {inputs}"
146
 
147
- if task_or_pipeline in ENCODER_TASKS or SEQ2SEQ_TAKS:
 
148
  result = main_export(
149
  model_name_or_path=model_id,
150
  output=folder,
@@ -155,16 +154,22 @@ def export(model_id: str, task_or_pipeline:str, model_type: str, folder: str):
155
  **inputs,
156
  )
157
 
158
- if task_or_pipeline in DECODER_TASKS:
159
- neuron_config = NeuronModelForCausalLM.get_neuron_config(model_name_or_path=model_id, **inputs)
 
 
 
 
160
  neuron_model = NeuronModelForCausalLM.export(
161
- model_id=export_decoder_id,
162
  neuron_config=neuron_config,
163
- token = HF_TOKEN,
164
  )
165
- model.save_pretrained(folder)
166
 
167
- if task_or_pipeline in DIFFUSION_PIPELINE_MAPPING:
 
 
168
  model = model_class.from_pretrained(model_id)
169
  input_shapes = build_stable_diffusion_components_mandatory_shapes(**inputs)
170
  compiler_kwargs = {"auto_cast": "matmul", "auto_cast_type": "bf16"}
@@ -179,24 +184,49 @@ def export(model_id: str, task_or_pipeline:str, model_type: str, folder: str):
179
  model=model,
180
  **input_shapes,
181
  )
 
 
 
 
182
 
183
 
184
- def export_and_git_add(model_id: str, task_or_pipeline: str, model_type: str, folder: str, token: str) -> Any:
 
185
 
186
  try:
 
187
  export(model_id, task_or_pipeline, model_type, folder)
188
  yield "✅ Export completed successfully."
189
  except Exception as e:
190
  yield f"❌ Export failed with error: {e}"
191
  raise
192
 
 
 
 
 
 
 
 
 
 
193
  operations = []
 
194
  for root, _, files in os.walk(folder):
195
  for filename in files:
196
  file_path = os.path.join(root, filename)
197
  repo_path = os.path.relpath(file_path, folder)
198
  operations.append(CommitOperationAdd(path_in_repo=repo_path, path_or_fileobj=file_path))
 
 
 
199
 
 
 
 
 
 
 
200
  try:
201
  card = ModelCard.load(model_id, token=token)
202
  if not hasattr(card.data, "tags") or card.data.tags is None:
@@ -213,10 +243,13 @@ def export_and_git_add(model_id: str, task_or_pipeline: str, model_type: str, fo
213
  readme_op.path_or_fileobj = readme_path
214
  else:
215
  operations.append(CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=readme_path))
 
 
216
 
217
  except Exception as e:
218
  yield f"⚠️ Warning: Could not update model card: {e}"
219
 
 
220
  yield ("__RETURN__", operations)
221
 
222
  def generate_neuron_repo_name(api, original_model_id: str, task_or_pipeline: str, token:str) -> str:
@@ -439,7 +472,7 @@ Generated using: [Optimum Neuron Compiler Space]({SPACES_URL})
439
  yield f"❌ Failed to create README PR: {e}"
440
  raise
441
 
442
- # --- Updated upload_to_custom_repo function (unchanged) ---
443
  def upload_to_custom_repo(
444
  operations: List[CommitOperationAdd],
445
  custom_repo_id: str,
 
134
  return discussion
135
  return None
136
 
137
+ def export(model_id: str, task_or_pipeline: str, model_type: str, folder: str):
138
+ """Export model to Neuron format. This is NOT a generator."""
139
+
140
+ print(f"📦 Exporting model `{model_id}` for task `{task_or_pipeline}`...")
141
+
 
 
142
  inputs = get_default_inputs(task_or_pipeline)
143
+ print(f"🔧 Using default inputs: {inputs}")
144
 
145
+ # ENCODER and SEQ2SEQ tasks
146
+ if task_or_pipeline in ENCODER_TASKS or task_or_pipeline in SEQ2SEQ_TAKS:
147
  result = main_export(
148
  model_name_or_path=model_id,
149
  output=folder,
 
154
  **inputs,
155
  )
156
 
157
+ # DECODER tasks
158
+ elif task_or_pipeline in DECODER_TASKS:
159
+ neuron_config = NeuronModelForCausalLM.get_neuron_config(
160
+ model_name_or_path=model_id,
161
+ **inputs
162
+ )
163
  neuron_model = NeuronModelForCausalLM.export(
164
+ model_id=model_id, # Fixed variable name
165
  neuron_config=neuron_config,
166
+ token=HF_TOKEN,
167
  )
168
+ neuron_model.save_pretrained(folder) # Fixed variable name
169
 
170
+ # DIFFUSION tasks
171
+ elif task_or_pipeline in DIFFUSION_PIPELINE_MAPPING:
172
+ model_class = DIFFUSION_PIPELINE_MAPPING.get(task_or_pipeline)
173
  model = model_class.from_pretrained(model_id)
174
  input_shapes = build_stable_diffusion_components_mandatory_shapes(**inputs)
175
  compiler_kwargs = {"auto_cast": "matmul", "auto_cast_type": "bf16"}
 
184
  model=model,
185
  **input_shapes,
186
  )
187
+ else:
188
+ raise ValueError(f"Unsupported task or pipeline: {task_or_pipeline}")
189
+
190
+ print(f"✅ Export completed successfully to {folder}")
191
 
192
 
193
+ def export_and_git_add(model_id: str, task_or_pipeline: str, model_type: str, folder: str, token: str):
194
+ """Export model and prepare git operations. This IS a generator."""
195
 
196
  try:
197
+ # Actually execute the export (not a generator anymore)
198
  export(model_id, task_or_pipeline, model_type, folder)
199
  yield "✅ Export completed successfully."
200
  except Exception as e:
201
  yield f"❌ Export failed with error: {e}"
202
  raise
203
 
204
+ # Verify that files were actually created
205
+ if not os.path.exists(folder) or not os.listdir(folder):
206
+ error_msg = f"❌ Export folder is empty or doesn't exist: {folder}"
207
+ yield error_msg
208
+ raise Exception(error_msg)
209
+
210
+ yield f"📁 Found exported files in {folder}"
211
+
212
+ # Collect all files for git operations
213
  operations = []
214
+ file_count = 0
215
  for root, _, files in os.walk(folder):
216
  for filename in files:
217
  file_path = os.path.join(root, filename)
218
  repo_path = os.path.relpath(file_path, folder)
219
  operations.append(CommitOperationAdd(path_in_repo=repo_path, path_or_fileobj=file_path))
220
+ file_count += 1
221
+
222
+ yield f"📦 Prepared {file_count} files for upload"
223
 
224
+ if file_count == 0:
225
+ error_msg = "❌ No files found to upload after export"
226
+ yield error_msg
227
+ raise Exception(error_msg)
228
+
229
+ # Update model card
230
  try:
231
  card = ModelCard.load(model_id, token=token)
232
  if not hasattr(card.data, "tags") or card.data.tags is None:
 
243
  readme_op.path_or_fileobj = readme_path
244
  else:
245
  operations.append(CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=readme_path))
246
+
247
+ yield "📝 Updated model card with neuron tag"
248
 
249
  except Exception as e:
250
  yield f"⚠️ Warning: Could not update model card: {e}"
251
 
252
+ # Return the operations
253
  yield ("__RETURN__", operations)
254
 
255
  def generate_neuron_repo_name(api, original_model_id: str, task_or_pipeline: str, token:str) -> str:
 
472
  yield f"❌ Failed to create README PR: {e}"
473
  raise
474
 
475
+ # --- Updated upload_to_custom_repo function ---
476
  def upload_to_custom_repo(
477
  operations: List[CommitOperationAdd],
478
  custom_repo_id: str,