Commit b11f65b

Unpin optimum-intel version (#1680)
The original issue was fixed by huggingface/optimum-intel#1142.
1 parent 7c7d03b commit b11f65b
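
In practice, the change swaps commit-pinned git requirements for ones that track the default branch. A minimal sketch of the difference, reusing the requirement specifiers that appear in the diffs below (the pip commands here are only for illustration):

# Before: pinned to a specific optimum-intel commit (reproducible, but misses newer upstream fixes)
pip install "optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@faeebf3416d17e3a6761db5f2e05569e0319311b"

# After: track the default branch, picking up the fix referenced above
pip install "optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@main"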

File tree

5 files changed (+6, -6 lines changed)


.github/workflows/genai-tools.yml (+1, -1)

@@ -187,7 +187,7 @@ jobs:
 python -m pip install ${{ env.SRC_DIR }}/thirdparty/openvino_tokenizers -v ${{ needs.openvino_download.outputs.ov_wheel_source }}
 python -m pip install ${{ env.SRC_DIR }} -v ${{ needs.openvino_download.outputs.ov_wheel_source }}
 python -m pip install -r ${{ env.WWB_PATH }}/requirements.txt ${{ needs.openvino_download.outputs.ov_wheel_source }}
-python -m pip install git+https://github.com/huggingface/optimum-intel.git@main#egg=optimum-intel
+python -m pip install git+https://github.com/huggingface/optimum-intel.git@main
 working-directory: ${{ env.OV_INSTALL_DIR }}
 - name: WWB Tests
 run: |

samples/export-requirements.txt (+1, -1)

@@ -2,7 +2,7 @@
 --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
 --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
 openvino-tokenizers~=2025.1.0.0.dev
-optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@faeebf3416d17e3a6761db5f2e05569e0319311b
+optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@main
 numpy<2.0.0; sys_platform == 'darwin'
 einops==0.8.0 # For Qwen
 transformers_stream_generator==0.0.5 # For Qwen

src/cpp/src/whisper/models/with_past_decoder.cpp (+1, -1)

@@ -84,7 +84,7 @@ WhisperWithPastDecoder::WhisperWithPastDecoder(const std::filesystem::path& mode
 const ov::AnyMap& properties) {
 Logger::warn("Whisper decoder models with past is deprecated. Support will be removed in 2026.0.0 release.\n"
 "To obtain stateful decoder model use latest `optimum-intel` package:\n"
-"pip install optimum-intel@git+https://github.com/huggingface/optimum-intel.git\n"
+"pip install optimum-intel@git+https://github.com/huggingface/optimum-intel.git@main\n"
 "optimum-cli export openvino --trust-remote-code --model openai/whisper-tiny whisper-tiny");
 ov::Core core = utils::singleton_core();
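
For readability, the two-step flow embedded in the warning string above, written out as shell commands (openai/whisper-tiny is just the example model used in the message):

# Install optimum-intel from the default branch, then re-export the model as a stateful decoder
pip install optimum-intel@git+https://github.com/huggingface/optimum-intel.git@main
optimum-cli export openvino --trust-remote-code --model openai/whisper-tiny whisper-tiny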

tests/python_tests/requirements.txt (+1, -1)

@@ -1,6 +1,6 @@
 --extra-index-url https://download.pytorch.org/whl/cpu
 diffusers==0.32.2
-optimum-intel @ git+https://github.com/eaidova/optimum-intel@ea/stateful_seq2seq
+optimum-intel @ git+https://github.com/huggingface/optimum-intel.git@main
 numpy<2.0.0; platform_system == "Darwin" and platform_machine == "x86_64"
 onnx==1.17.0
 pytest

tools/llm_bench/requirements.txt (+2, -2)

@@ -11,8 +11,8 @@ torch
 transformers>=4.40.0
 diffusers>=0.22.0
 #optimum is in dependency list of optimum-intel
-git+https://github.com/huggingface/optimum-intel.git@faeebf3416d17e3a6761db5f2e05569e0319311b#egg=optimum-intel
-git+https://github.com/openvinotoolkit/nncf.git@develop#egg=nncf
+git+https://github.com/huggingface/optimum-intel.git@main
+git+https://github.com/openvinotoolkit/nncf.git@develop
 packaging
 psutil
 timm
