Skip to content

Commit bfc8663

Browse files
authored
Prevent loading model for export if it is not supported (#710)
1 parent d9c8f9f commit bfc8663

File tree

2 files changed

+6
-1
lines changed

2 files changed

+6
-1
lines changed

optimum/exporters/openvino/__main__.py

+5
Original file line numberDiff line numberDiff line change
@@ -219,6 +219,10 @@ def main_export(
219219
model_type = config.model_type.replace("_", "-")
220220
if model_type not in TasksManager._SUPPORTED_MODEL_TYPE:
221221
custom_architecture = True
222+
if custom_export_configs is None:
223+
raise ValueError(
224+
f"Trying to export a {model_type} model, that is a custom or unsupported architecture, but no custom export configuration was passed as `custom_export_configs`. Please refer to https://huggingface.co/docs/optimum/main/en/exporters/onnx/usage_guides/export_a_model#custom-export-of-transformers-models for an example on how to export custom models. Please open an issue at https://github.com/huggingface/optimum-intel/issues if you would like the model type {model_type} to be supported natively in the OpenVINO export."
225+
)
222226
elif task not in TasksManager.get_supported_tasks_for_model_type(
223227
model_type, exporter="openvino", library_name=library_name
224228
):
@@ -232,6 +236,7 @@ def main_export(
232236
raise ValueError(
233237
f"Asked to export a {model_type} model for the task {task}{autodetected_message}, but the Optimum OpenVINO exporter only supports the tasks {', '.join(model_tasks.keys())} for {model_type}. Please use a supported task. Please open an issue at https://github.com/huggingface/optimum/issues if you would like the task {task} to be supported in the ONNX export for {model_type}."
234238
)
239+
235240
if is_transformers_version(">=", "4.36") and model_type in SDPA_ARCHS_ONNX_EXPORT_NOT_SUPPORTED:
236241
loading_kwargs["attn_implementation"] = "eager"
237242
# there are some difference between remote and in library representation of past key values for some models,

optimum/exporters/openvino/convert.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -547,7 +547,7 @@ def export_from_model(
547547
# TODO: support onnx_config.py in the model repo
548548
if custom_architecture and custom_export_configs is None:
549549
raise ValueError(
550-
f"Trying to export a {model_type} model, that is a custom or unsupported architecture, but no custom export configuration was passed as `custom_export_configs`. Please refer to https://huggingface.co/docs/optimum/main/en/exporters/onnx/usage_guides/export_a_model#custom-export-of-transformers-models for an example on how to export custom models. Please open an issue at https://github.com/huggingface/optimum/issues if you would like the model type {model_type} to be supported natively in the ONNX export."
550+
f"Trying to export a {model_type} model, that is a custom or unsupported architecture, but no custom export configuration was passed as `custom_export_configs`. Please refer to https://huggingface.co/docs/optimum/main/en/exporters/onnx/usage_guides/export_a_model#custom-export-of-transformers-models for an example on how to export custom models. Please open an issue at https://github.com/huggingface/optimum-intel/issues if you would like the model type {model_type} to be supported natively in the OpenVINO export."
551551
)
552552

553553
if task.startswith("text-generation") and model.config.is_encoder_decoder:

0 commit comments

Comments (0)