diff --git a/.github/workflows/genai-tools.yml b/.github/workflows/genai-tools.yml
index 7cf8cb5f7f..29e3d3bc79 100644
--- a/.github/workflows/genai-tools.yml
+++ b/.github/workflows/genai-tools.yml
@@ -187,7 +187,7 @@ jobs:
           python -m pip install ${{ env.SRC_DIR }}/thirdparty/openvino_tokenizers -v ${{ needs.openvino_download.outputs.ov_wheel_source }}
           python -m pip install ${{ env.SRC_DIR }} -v ${{ needs.openvino_download.outputs.ov_wheel_source }}
           python -m pip install -r ${{ env.WWB_PATH }}/requirements.txt ${{ needs.openvino_download.outputs.ov_wheel_source }}
-          python -m pip install git+https://github.com/huggingface/optimum-intel.git@main#egg=optimum-intel
+          python -m pip install git+https://github.com/huggingface/optimum-intel.git@main
         working-directory: ${{ env.OV_INSTALL_DIR }}
       - name: WWB Tests
         run: |
diff --git a/samples/export-requirements.txt b/samples/export-requirements.txt
index f1cebcad4a..bfe9f40d8b 100644
--- a/samples/export-requirements.txt
+++ b/samples/export-requirements.txt
@@ -2,7 +2,7 @@
 --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
 --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
 openvino-tokenizers~=2025.1.0.0.dev
-optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@faeebf3416d17e3a6761db5f2e05569e0319311b
+optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@main
 numpy<2.0.0; sys_platform == 'darwin'
 einops==0.8.0 # For Qwen
 transformers_stream_generator==0.0.5 # For Qwen
diff --git a/src/cpp/src/whisper/models/with_past_decoder.cpp b/src/cpp/src/whisper/models/with_past_decoder.cpp
index 1ade0dea6b..2cebbbbde0 100644
--- a/src/cpp/src/whisper/models/with_past_decoder.cpp
+++ b/src/cpp/src/whisper/models/with_past_decoder.cpp
@@ -84,7 +84,7 @@ WhisperWithPastDecoder::WhisperWithPastDecoder(const std::filesystem::path& mode
                                                const ov::AnyMap& properties) {
     Logger::warn("Whisper decoder models with past is deprecated. Support will be removed in 2026.0.0 release.\n"
                  "To obtain stateful decoder model use latest `optimum-intel` package:\n"
-                 "pip install optimum-intel@git+https://github.com/huggingface/optimum-intel.git\n"
+                 "pip install optimum-intel@git+https://github.com/huggingface/optimum-intel.git@main\n"
                  "optimum-cli export openvino --trust-remote-code --model openai/whisper-tiny whisper-tiny");
 
     ov::Core core = utils::singleton_core();
diff --git a/tests/python_tests/requirements.txt b/tests/python_tests/requirements.txt
index 8d0f6d4e30..b326ec7be8 100644
--- a/tests/python_tests/requirements.txt
+++ b/tests/python_tests/requirements.txt
@@ -1,6 +1,6 @@
 --extra-index-url https://download.pytorch.org/whl/cpu
 diffusers==0.32.2
-optimum-intel @ git+https://github.com/eaidova/optimum-intel@ea/stateful_seq2seq
+optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@main
 numpy<2.0.0; platform_system == "Darwin" and platform_machine == "x86_64"
 onnx==1.17.0
 pytest
diff --git a/tools/llm_bench/requirements.txt b/tools/llm_bench/requirements.txt
index ca6fd10053..86a3f3e57b 100644
--- a/tools/llm_bench/requirements.txt
+++ b/tools/llm_bench/requirements.txt
@@ -11,8 +11,8 @@ torch
 transformers>=4.40.0
 diffusers>=0.22.0
 #optimum is in dependency list of optimum-intel
-git+https://github.com/huggingface/optimum-intel.git@faeebf3416d17e3a6761db5f2e05569e0319311b#egg=optimum-intel
-git+https://github.com/openvinotoolkit/nncf.git@develop#egg=nncf
+git+https://github.com/huggingface/optimum-intel.git@main
+git+https://github.com/openvinotoolkit/nncf.git@develop
 packaging
 psutil
 timm