Add version info to OpenVINO models #690

Merged: 2 commits, Apr 29, 2024
Changes from all commits
44 changes: 43 additions & 1 deletion optimum/exporters/openvino/convert.py
@@ -20,9 +20,10 @@
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union

+import onnx
 from transformers.utils import is_tf_available, is_torch_available

-from openvino.runtime import PartialShape, save_model
+from openvino.runtime import Model, PartialShape, save_model
 from openvino.runtime.exceptions import OVTypeError
 from openvino.runtime.utils.types import get_element_type
 from openvino.tools.ovc import convert_model
@@ -32,6 +33,14 @@
 from optimum.exporters.onnx.convert import export_pytorch as export_pytorch_to_onnx
 from optimum.exporters.onnx.convert import export_tensorflow as export_tensorflow_onnx
 from optimum.exporters.utils import _get_submodels_and_export_configs
+from optimum.intel.utils.import_utils import (
+    _nncf_version,
+    _optimum_intel_version,
+    _optimum_version,
+    _timm_version,
+    _torch_version,
+    _transformers_version,
+)
 from optimum.utils import DEFAULT_DUMMY_SHAPES, is_diffusers_available
 from optimum.utils.save_utils import maybe_save_preprocessors
@@ -81,6 +90,8 @@ def _save_model(model, path: str, ov_config: Optional["OVConfig"] = None):

     compress_to_fp16 = ov_config.dtype == "fp16"

+    library_name = TasksManager.infer_library_from_model(Path(path).parent)
+    model = _add_version_info_to_model(model, library_name)
     save_model(model, path, compress_to_fp16)


@@ -689,3 +700,34 @@ def export_tokenizer(

     for model, file_name in zip(converted, (OV_TOKENIZER_NAME, OV_DETOKENIZER_NAME)):
         save_model(model, output / file_name.format(suffix))
+
+
+def _add_version_info_to_model(model: Model, library_name: Optional[str] = None):
+    """
+    Add dependency versions to OpenVINO model
+    """
+    try:
+        model.set_rt_info(_transformers_version, ["optimum", "transformers_version"])
+        model.set_rt_info(_torch_version, ["optimum", "pytorch_version"])
+        model.set_rt_info(_optimum_intel_version, ["optimum", "optimum_intel_version"])
+        model.set_rt_info(_optimum_version, ["optimum", "optimum_version"])
+
+        if any("token_embeddings" in output.get_names() for output in model.outputs):
+            import sentence_transformers
+
+            model.set_rt_info(sentence_transformers.__version__, ["optimum", "sentence_transformers_version"])
+        if library_name == "diffusers":
Collaborator commented on lines +715 to +719:

why not:

Suggested change:
-        if any("token_embeddings" in output.get_names() for output in model.outputs):
-            import sentence_transformers
-            model.set_rt_info(sentence_transformers.__version__, ["optimum", "sentence_transformers_version"])
-        if library_name == "diffusers":
+        if library_name == "sentence_transformers":
+            import sentence_transformers
+            model.set_rt_info(sentence_transformers.__version__, ["optimum", "sentence_transformers_version"])
+        elif library_name == "diffusers":

Collaborator (Author) replied:

That was my first attempt, but it didn't work: after optimum-cli export openvino -m BAAI/bge-base-en-v1.5 --library sentence_transformers bge-base-en-ov-st, library_name is detected as "transformers" by TasksManager.infer_library_from_model. If I copy config_sentence_transformers.json to the model directory, the model is detected as sentence-transformers. Should we save that file for models exported with the sentence_transformers library?

Collaborator replied:

I see; an option could be to re-use the library_name inferred from the original model. This solution also works for me.

+            model.set_rt_info(_optimum_version, ["optimum", "diffusers_version"])
+        elif library_name == "timm":
+            model.set_rt_info(_timm_version, ["optimum", "timm_version"])
+        rt_info = model.get_rt_info()
+        if "nncf" in rt_info:
+            model.set_rt_info(_nncf_version, ["optimum", "nncf_version"])
+        input_model = rt_info["conversion_parameters"].get("input_model", None)
+        if input_model is not None and "onnx" in input_model.value:
+            model.set_rt_info(onnx.__version__, ["optimum", "onnx_version"])
+
+    except Exception:
+        pass
+
+    return model
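
For reference, a minimal sketch of reading this version metadata back from a saved IR; the file path is a placeholder, and the rt_info access mirrors the pattern used in the test below:

from openvino.runtime import Core

core = Core()
# Hypothetical path: any openvino_model.xml produced by the exporter after this change.
ov_model = core.read_model("exported_model/openvino_model.xml")

rt_info = ov_model.get_rt_info()
if "optimum" in rt_info:
    print(rt_info["optimum"]["transformers_version"])
    print(rt_info["optimum"]["optimum_intel_version"])
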
1 change: 1 addition & 0 deletions optimum/intel/utils/import_utils.py
@@ -33,6 +33,7 @@
 STR_OPERATION_TO_FUNC = {">": op.gt, ">=": op.ge, "==": op.eq, "!=": op.ne, "<=": op.le, "<": op.lt}

 _optimum_version = importlib_metadata.version("optimum")
+_optimum_intel_version = importlib_metadata.version("optimum-intel")
Collaborator @eaidova commented on Apr 26, 2024:

why not from optimum.intel.version import __version__?

Collaborator (Author) replied:

I liked the consistency with _optimum_version :) But also the importlib_metadata version gives the commit ID:

>>> from optimum.intel.version import __version__
>>> from optimum.intel.utils.import_utils import _optimum_intel_version
>>> __version__
'1.17.0.dev0'
>>> _optimum_intel_version
'1.17.0.dev0+9d58b66'


 _transformers_available = importlib.util.find_spec("transformers") is not None
 _transformers_version = "N/A"
4 changes: 4 additions & 0 deletions tests/openvino/test_export.py
@@ -45,6 +45,7 @@
     OVStableDiffusionXLPipeline,
 )
 from optimum.intel.openvino.modeling_base import OVBaseModel
+from optimum.intel.utils.import_utils import _transformers_version
 from optimum.utils.save_utils import maybe_load_preprocessors


@@ -113,6 +114,9 @@ def _openvino_export(

         if task == "text-generation":
             self.assertEqual(ov_model.stateful, stateful and use_cache)
+            self.assertEqual(
+                ov_model.model.get_rt_info()["optimum"]["transformers_version"], _transformers_version
+            )

     @parameterized.expand(SUPPORTED_ARCHITECTURES)
     def test_export(self, model_type: str):
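
As a rough usage sketch (the model name and output directory are placeholders), the same metadata can be checked on a model loaded through optimum-intel, following the access pattern of the new assertion:

from optimum.intel import OVModelForCausalLM

# Assumes a prior export such as: optimum-cli export openvino -m gpt2 gpt2_ov
ov_model = OVModelForCausalLM.from_pretrained("gpt2_ov")

rt_info = ov_model.model.get_rt_info()
print(rt_info["optimum"]["transformers_version"])
print(rt_info["optimum"]["optimum_version"])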