Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 9c156ac

Browse files
committed Apr 26, 2024
Add more dependencies
1 parent 9d58b66 commit 9c156ac

File tree

2 files changed

+41
-5
lines changed

2 files changed

+41
-5
lines changed
 

‎optimum/exporters/openvino/convert.py

+37-5
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,10 @@
2020
from pathlib import Path
2121
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
2222

23+
import onnx
2324
from transformers.utils import is_tf_available, is_torch_available
2425

25-
from openvino.runtime import PartialShape, save_model
26+
from openvino.runtime import Model, PartialShape, save_model
2627
from openvino.runtime.exceptions import OVTypeError
2728
from openvino.runtime.utils.types import get_element_type
2829
from openvino.tools.ovc import convert_model
@@ -33,8 +34,10 @@
3334
from optimum.exporters.onnx.convert import export_tensorflow as export_tensorflow_onnx
3435
from optimum.exporters.utils import _get_submodels_and_export_configs
3536
from optimum.intel.utils.import_utils import (
37+
_nncf_version,
3638
_optimum_intel_version,
3739
_optimum_version,
40+
_timm_version,
3841
_torch_version,
3942
_transformers_version,
4043
)
@@ -87,10 +90,8 @@ def _save_model(model, path: str, ov_config: Optional["OVConfig"] = None):
8790

8891
compress_to_fp16 = ov_config.dtype == "fp16"
8992

90-
model.set_rt_info(_transformers_version, ["optimum", "transformers_version"])
91-
model.set_rt_info(_torch_version, ["optimum", "pytorch_version"])
92-
model.set_rt_info(_optimum_intel_version, ["optimum", "optimum_intel_version"])
93-
model.set_rt_info(_optimum_version, ["optimum", "optimum_version"])
93+
library_name = TasksManager.infer_library_from_model(Path(path).parent)
94+
model = _add_version_info_to_model(model, library_name)
9495
save_model(model, path, compress_to_fp16)
9596

9697

@@ -699,3 +700,34 @@ def export_tokenizer(
699700

700701
for model, file_name in zip(converted, (OV_TOKENIZER_NAME, OV_DETOKENIZER_NAME)):
701702
save_model(model, output / file_name.format(suffix))
703+
704+
705+
def _add_version_info_to_model(model: Model, library_name: Optional[str] = None):
    """
    Add dependency version metadata to an OpenVINO model's runtime info.

    Records, under the "optimum" runtime-info namespace: the transformers,
    torch, optimum-intel and optimum versions; library-specific versions
    (sentence-transformers, diffusers, timm) when applicable; the nncf
    version when the model carries NNCF compression metadata; and the onnx
    version when the model was converted from an ONNX input.

    Args:
        model (`Model`): the OpenVINO model to annotate in place.
        library_name (`Optional[str]`): the source library of the model
            (e.g. "diffusers" or "timm"), used to select extra version entries.

    Returns:
        `Model`: the same model instance, with runtime info added on a
        best-effort basis.
    """
    try:
        model.set_rt_info(_transformers_version, ["optimum", "transformers_version"])
        model.set_rt_info(_torch_version, ["optimum", "pytorch_version"])
        model.set_rt_info(_optimum_intel_version, ["optimum", "optimum_intel_version"])
        model.set_rt_info(_optimum_version, ["optimum", "optimum_version"])

        # A "token_embeddings" output indicates a sentence-transformers model;
        # import lazily so the dependency stays optional.
        if any("token_embeddings" in output.get_names() for output in model.outputs):
            import sentence_transformers

            model.set_rt_info(sentence_transformers.__version__, ["optimum", "sentence_transformers_version"])
        if library_name == "diffusers":
            # Bug fix: the original recorded _optimum_version under the
            # "diffusers_version" key; record the actual diffusers version,
            # imported lazily like sentence_transformers above.
            import diffusers

            model.set_rt_info(diffusers.__version__, ["optimum", "diffusers_version"])
        elif library_name == "timm":
            model.set_rt_info(_timm_version, ["optimum", "timm_version"])
        rt_info = model.get_rt_info()
        # An existing "nncf" section means the model was compressed with NNCF.
        if "nncf" in rt_info:
            model.set_rt_info(_nncf_version, ["optimum", "nncf_version"])
        # "conversion_parameters" may be absent; a KeyError here is absorbed
        # by the best-effort handler below.
        input_model = rt_info["conversion_parameters"].get("input_model", None)
        if input_model is not None and "onnx" in input_model.value:
            model.set_rt_info(onnx.__version__, ["optimum", "onnx_version"])
    except Exception:
        # Version metadata is purely informational: never fail model export
        # because it could not be recorded.
        pass

    return model

‎tests/openvino/test_export.py

+4
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@
4545
OVStableDiffusionXLPipeline,
4646
)
4747
from optimum.intel.openvino.modeling_base import OVBaseModel
48+
from optimum.intel.utils.import_utils import _transformers_version
4849
from optimum.utils.save_utils import maybe_load_preprocessors
4950

5051

@@ -113,6 +114,9 @@ def _openvino_export(
113114

114115
if task == "text-generation":
115116
self.assertEqual(ov_model.stateful, stateful and use_cache)
117+
self.assertEqual(
118+
ov_model.model.get_rt_info()["optimum"]["transformers_version"], _transformers_version
119+
)
116120

117121
@parameterized.expand(SUPPORTED_ARCHITECTURES)
118122
def test_export(self, model_type: str):

0 commit comments

Comments
 (0)
Please sign in to comment.