Commit 770f7ed

Merge branch 'master' into clean-up
2 parents: ff27cf7 + 6d2763a

77 files changed: +11309 -636 lines changed

.github/workflows/causal_lm_cpp.yml (+17 -23)

@@ -14,6 +14,7 @@ concurrency:
 
 env:
   l_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2024.5.0-16570-19eb02fe60b/l_openvino_toolkit_ubuntu20_2024.5.0.dev20240830_x86_64.tgz
+  l_u22_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2024.5.0-16570-19eb02fe60b/l_openvino_toolkit_ubuntu22_2024.5.0.dev20240830_x86_64.tgz
   m_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2024.5.0-16570-19eb02fe60b/m_openvino_toolkit_macos_12_6_2024.5.0.dev20240830_x86_64.tgz
   w_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2024.5.0-16570-19eb02fe60b/w_openvino_toolkit_windows_2024.5.0.dev20240830_x86_64.zip
 jobs:

@@ -202,8 +203,7 @@ jobs:
           echo "Multi prompt" passed
 
   cpp-greedy_causal_lm-windows:
-    runs-on: windows-latest
-    if: ${{ false }} # TODO: fix Windows
+    runs-on: windows-2019-16-core
     env:
       PYTHONIOENCODING: "utf8"
     defaults:

@@ -218,6 +218,8 @@ jobs:
           python-version: 3.9
       - name: Configure Developer Command Prompt for Microsoft Visual C++
         uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0
+        with:
+          toolset: 14.29
       - run: curl --output ov.zip ${{ env.w_ov_link }}
       - run: unzip -d ov ov.zip
       - run: dirs=(ov/*) && mv ov/*/* ov && rmdir "${dirs[@]}"
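
Note on the hunk above: the two added lines pin the compiler toolset that the msvc-dev-cmd action configures, rather than taking whatever the runner image defaults to; the same `with: toolset: 14.29` block is added to every Windows job this commit re-enables. For orientation, a minimal stand-alone Windows job using the pattern could look like the sketch below (the job name and the checkout/build steps are illustrative, not taken from the workflow; `toolset` is a documented input of ilammy/msvc-dev-cmd, and 14.29 is the MSVC toolset shipped with Visual Studio 2019):

    example-windows-build:            # hypothetical job name
      runs-on: windows-2019-16-core   # the runner label the workflow switches to
      steps:
        - uses: actions/checkout@v4
        - name: Configure Developer Command Prompt for Microsoft Visual C++
          uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0
          with:
            toolset: 14.29            # pin MSVC 14.29 (VS 2019) explicitly
        - name: Build
          run: |
            cmake -S . -B build
            cmake --build build --config Release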
@@ -681,11 +683,8 @@ jobs:
           diff pred2.txt ref.txt
           echo "Chat sample python" passed
 
-  visual_chat_sample-ubuntu:
+  visual_language_chat_sample-ubuntu:
     runs-on: ubuntu-22.04-16-cores
-    defaults:
-      run:
-        shell: bash
     steps:
       - uses: actions/checkout@v4
         with:

@@ -703,25 +702,19 @@ jobs:
           source ./ov/setupvars.sh
           cmake -DCMAKE_BUILD_TYPE=Release -S ./ -B ./build/
           cmake --build ./build/ --config Release --target visual_language_chat -j
-      - name: Download and convert and model
+      - name: Download and convert a model and an image
         run: |
           source ./ov/setupvars.sh
-          python -m pip install --upgrade-strategy eager -r ./samples/requirements.txt --pre --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
           python -m pip install ./thirdparty/openvino_tokenizers/[transformers] --pre --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
+          python -m pip install --upgrade-strategy eager -r ./samples/requirements.txt --pre --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
           python ./samples/cpp/visual_language_chat/export_MiniCPM-V-2_6.py ./miniCPM-V-2_6/
-          wget https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11
-
-      - name: Run C++ chat sample
-        run: |
-          source ./ov/setupvars.sh
-          timeout --verbose 120s ./build/samples/cpp/visual_language_chat/visual_language_chat ./miniCPM-V-2_6/ cat.jpg <<< $'What is on the image?\nWhat is special on the image?'
-      - name: Run Python chat sample
-        run: |
+          wget https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11 --output-document cat.jpg
+
+      - name: Run chat chat sample
+        run: >
           source ./ov/setupvars.sh
-          export PYTHONPATH=./build/:$PYTHONPATH
-          printf 'What is on the image?\nWhat is special on the image?\n' > ./input.txt
-          timeout 120s python ./samples/python/vlm_chat_sample/vlm_chat_sample.py ./miniCPM-V-2_6/ d5fbbd1a-d484-415c-88cb-9986625b7b11 < input.txt > ./pred.txt || ( [[ $? -eq 124 ]] && \
-          echo "Timeout reached, but it's excpected." )
+          && timeout 120s ./build/samples/cpp/visual_language_chat/visual_language_chat ./miniCPM-V-2_6/ cat.jpg
+          <<< $'What is on the image?\nWhat is special on the image?'
 
   cpp-continuous-batching-ubuntu:
     runs-on: ubuntu-20.04-8-cores

@@ -767,8 +760,7 @@ jobs:
           timeout 200s ./build/samples/cpp/continuous_batching_benchmark/continuous_batching_benchmark -n 10 --dynamic_split_fuse --max_batch_size 256 --max_input_len 256 -m ./TinyLlama-1.1B-Chat-v1.0/ --dataset ./ShareGPT_V3_unfiltered_cleaned_split.json --cache_size 1
 
   cpp-continuous-batching-windows:
-    runs-on: windows-latest
-    if: ${{ false }} # TODO: fix Windows
+    runs-on: windows-2019-16-core
     env:
       PYTHONIOENCODING: "utf8"
     defaults:

@@ -783,6 +775,8 @@ jobs:
           python-version: 3.9
       - name: Configure Developer Command Prompt for Microsoft Visual C++
         uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0
+        with:
+          toolset: 14.29
       - name: Install OpenVINO
         run: |
           curl --output ov.zip ${{ env.w_ov_link }}

@@ -865,7 +859,7 @@ jobs:
            cpp-beam_search_causal_lm-Qwen-7B-Chat, cpp-beam_search_causal_lm-Qwen1_5-7B-Chat, cpp-beam_search_causal_lm-Phi-2,
            cpp-beam_search_causal_lm-notus-7b-v1, cpp-speculative_decoding_lm-ubuntu, cpp-prompt_lookup_decoding_lm-ubuntu,
            cpp-Phi-1_5, cpp-greedy_causal_lm-redpajama-3b-chat, cpp-chat_sample-ubuntu, cpp-continuous-batching-ubuntu,
-           visual_language_sample-ubuntu,
+           visual_language_chat_sample-ubuntu,
            cpp-continuous-batching-windows, cpp-continuous-batching-macos]
     if: ${{ always() }}
     runs-on: ubuntu-latest
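
The rewritten "Run chat chat sample" step above combines two pieces of syntax that are easy to misread in a diff: the YAML folded block scalar (`run: >`) joins the equally indented lines into one shell command line, and the bash here-string with ANSI-C quoting (`<<< $'...\n...'`) feeds the two questions to the sample's stdin as two separate lines. A minimal sketch of the same mechanics with a generic command (the step below is purely illustrative and not part of the workflow):

    - name: Folded scalar and here-string demo   # hypothetical step
      run: >
        echo "setup"
        && wc -l
        <<< $'first question\nsecond question'

YAML folds this into the single command `echo "setup" && wc -l <<< $'first question\nsecond question'`; `wc -l` prints 2 because the `\n` inside `$'...'` becomes a real newline on stdin.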

.github/workflows/lcm_dreamshaper_cpp.yml (+9 -5)

@@ -67,11 +67,10 @@ jobs:
       - name: Run app
         run: |
           source ${{ env.OV_INSTALL_DIR }}/setupvars.sh
-          ./build/samples/cpp/stable_diffusion/stable_diffusion ./models/lcm_dreamshaper_v7/FP16 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"
+          ./build/samples/cpp/text2image/stable_diffusion ./models/lcm_dreamshaper_v7/FP16 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"
 
   lcm_dreamshaper_v7_cpp-windows:
-    runs-on: windows-latest
-    if: ${{ false }} # TODO: fix Windows
+    runs-on: windows-2019-16-core
     defaults:
       run:
         shell: pwsh

@@ -89,6 +88,11 @@ jobs:
           mv ./tmp/*/* .
           popd
 
+      - name: Configure Developer Command Prompt for Microsoft Visual C++
+        uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0
+        with:
+          toolset: 14.29
+
       - name: Build app
         run: |
           . "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"

@@ -116,9 +120,9 @@ jobs:
           optimum-cli export openvino --model SimianLuo/LCM_Dreamshaper_v7 --task stable-diffusion --weight-format fp16 models/lcm_dreamshaper_v7/FP16
 
       - name: Run app
-        run: |
+        run: >
           . "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"
-          ./build/samples/cpp/stable_diffusion/Release/lcm_dreamshaper.exe ./models/lcm_dreamshaper_v7/FP16 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"
+          & "./build/samples/cpp/text2image/Release/stable_diffusion.exe ./models/lcm_dreamshaper_v7/FP16 'cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting'"
 
   Overall_Status:
     name: ci/gha_overall_status_lcm
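
The Windows run step in this file now goes through a folded scalar and PowerShell's call operator (`&`). For reference, the call operator conventionally takes the executable path as one quoted token and each argument as a separate token; a sketch of that conventional form, reusing the paths and prompt from the hunk above (an illustration of the operator, not the exact step committed here):

    - name: Run app   # illustrative form only
      run: |
        . "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"
        & "./build/samples/cpp/text2image/Release/stable_diffusion.exe" `
          "./models/lcm_dreamshaper_v7/FP16" `
          "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"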

.github/workflows/llm_bench-python.yml (+6 -7)

@@ -40,11 +40,7 @@ jobs:
           python -m pip install --upgrade pip
           python -m pip install flake8 pytest black
           GIT_CLONE_PROTECTION_ACTIVE=false pip install -r ${{ env.LLM_BENCH_PYPATH }}/requirements.txt
-          python -m pip install -U --pre openvino openvino-tokenizers openvino-genai --extra-index-url
-https://storage.openvinotoolkit.org/simple/wheels/nightly
-          GIT_CLONE_PROTECTION_ACTIVE=false pip install -r ${{ env.WWB_PATH }}/requirements.txt
-          GIT_CLONE_PROTECTION_ACTIVE=false pip install ${{ env.WWB_PATH }}
-
+          python -m pip install -U --pre openvino openvino-tokenizers openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
       - name: Lint with flake8
         run: |
           # stop the build if there are Python syntax errors or undefined names

@@ -74,6 +70,9 @@ https://storage.openvinotoolkit.org/simple/wheels/nightly
           python ./llm_bench/python/benchmark.py -m ./ov_models/tiny-sd/pytorch/dldt/FP16/ -pf ./llm_bench/python/prompts/stable-diffusion.jsonl -d cpu -n 1
       - name: WWB Tests
         run: |
+          GIT_CLONE_PROTECTION_ACTIVE=false pip install -r ${{ env.WWB_PATH }}/requirements.txt
+          pip install git+https://github.com/huggingface/optimum.git
+          GIT_CLONE_PROTECTION_ACTIVE=false pip install ${{ env.WWB_PATH }}
           python -m pytest llm_bench/python/who_what_benchmark/tests
   stateful:
     runs-on: ubuntu-20.04

@@ -86,13 +85,13 @@ https://storage.openvinotoolkit.org/simple/wheels/nightly
         run: |
           GIT_CLONE_PROTECTION_ACTIVE=false python -m pip install -r llm_bench/python/requirements.txt
           python -m pip uninstall --yes openvino
-          python -m pip install -U --pre openvino openvino-tokenizers openvino-genai --extra-index-url
-https://storage.openvinotoolkit.org/simple/wheels/nightly
+          python -m pip install -U --pre openvino openvino-tokenizers openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
           python llm_bench/python/convert.py --model_id TinyLlama/TinyLlama-1.1B-Chat-v1.0 --output_dir . --stateful
           grep beam_idx pytorch/dldt/FP32/openvino_model.xml
       - name: WWB Tests
         run: |
           GIT_CLONE_PROTECTION_ACTIVE=false pip install -r llm_bench/python/who_what_benchmark/requirements.txt
+          pip install git+https://github.com/huggingface/optimum.git
           GIT_CLONE_PROTECTION_ACTIVE=false pip install llm_bench/python/who_what_benchmark/
           pip install pytest
           python -m pytest llm_bench/python/who_what_benchmark/tests
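
The main fix in this file is rejoining `--extra-index-url` with its URL on one logical line; the hunk headers above, which show the bare URL as their context text, suggest the URL had ended up unindented on a line of its own, outside the `run: |` block it was meant to continue. If line length is the concern, a trailing backslash inside the block scalar keeps the option and its value together while still wrapping; a sketch under that assumption (the step name is hypothetical):

    - name: Install nightly OpenVINO wheels   # hypothetical step name
      run: |
        python -m pip install -U --pre openvino openvino-tokenizers openvino-genai \
          --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly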

.github/workflows/stable_diffusion_1_5_cpp.yml (+23 -7)

@@ -63,15 +63,20 @@ jobs:
         run: |
           source openvino_sd_cpp/bin/activate
           optimum-cli export openvino --model dreamlike-art/dreamlike-anime-1.0 --weight-format fp16 --task stable-diffusion models/dreamlike-art-dreamlike-anime-1.0/FP16
+          wget -O ./models/soulcard.safetensors https://civitai.com/api/download/models/72591
 
-      - name: Run app
+      - name: Run main app
         run: |
           source ${{ env.OV_INSTALL_DIR }}/setupvars.sh
-          ./build/samples/cpp/stable_diffusion/stable_diffusion ./models/dreamlike-art-dreamlike-anime-1.0/FP16 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"
+          ./build/samples/cpp/text2image/stable_diffusion ./models/dreamlike-art-dreamlike-anime-1.0/FP16 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"
+
+      - name: Run LoRA app
+        run: |
+          source ${{ env.OV_INSTALL_DIR }}/setupvars.sh
+          ./build/samples/cpp/text2image/lora_stable_diffusion ./models/dreamlike-art-dreamlike-anime-1.0/FP16 "curly-haired unicorn in the forest, anime, line" ./models/soulcard.safetensors 0.7
 
   stable_diffusion_1_5_cpp-windows:
-    runs-on: windows-latest
-    if: ${{ false }} # TODO: fix Windows
+    runs-on: windows-2019-16-core
     defaults:
       run:
         shell: pwsh

@@ -89,6 +94,11 @@ jobs:
           mv ./tmp/*/* .
           popd
 
+      - name: Configure Developer Command Prompt for Microsoft Visual C++
+        uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0
+        with:
+          toolset: 14.29
+
       - name: Build app
         run: |
           . "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"

@@ -114,11 +124,17 @@ jobs:
         run: |
           . "./openvino_sd_cpp/Scripts/Activate.ps1"
           optimum-cli export openvino --model dreamlike-art/dreamlike-anime-1.0 --task stable-diffusion --weight-format fp16 models/dreamlike-art-dreamlike-anime-1.0/FP16
+          Invoke-WebRequest -Uri 'https://civitai.com/api/download/models/72591' -OutFile 'models/soulcard.safetensors'
 
-      - name: Run app
-        run: |
+      - name: Run main app
+        run: >
+          . "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"
+          & "./build/samples/cpp/text2image/Release/stable_diffusion.exe ./models/dreamlike-art-dreamlike-anime-1.0/FP16 'cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting'"
+
+      - name: Run LoRA app
+        run: >
           . "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"
-          ./build/samples/cpp/stable_diffusion/Release/stable_diffusion.exe ./models/dreamlike-art-dreamlike-anime-1.0/FP16 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting"
+          & "./build/samples/cpp/text2image/Release/lora_stable_diffusion.exe ./models/dreamlike-art-dreamlike-anime-1.0/FP16 'curly-haired unicorn in the forest, anime, line' ./models/soulcard.safetensors 0.7"
 
   Overall_Status:
     name: ci/gha_overall_status_stable_diffusion
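
The new LoRA steps in this file download a soulcard.safetensors adapter from civitai and hand it to the lora_stable_diffusion sample after the model directory and the prompt; judging from the command lines above, the trailing 0.7 is the blend weight applied to that adapter. A sketch of the Ubuntu step with the positional arguments spelled out on separate lines (the argument roles are inferred from the invocation, not stated anywhere in the diff):

    - name: Run LoRA app
      run: |
        source ${{ env.OV_INSTALL_DIR }}/setupvars.sh
        # arguments: <model dir> <prompt> <LoRA .safetensors> <blend weight>  -- roles inferred
        ./build/samples/cpp/text2image/lora_stable_diffusion \
          ./models/dreamlike-art-dreamlike-anime-1.0/FP16 \
          "curly-haired unicorn in the forest, anime, line" \
          ./models/soulcard.safetensors \
          0.7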

README.md (+1 -1)

@@ -34,7 +34,7 @@ It includes the following pipelines:
 6. [multinomial_causal_lm](./samples/cpp/multinomial_causal_lm/README.md)
 7. [prompt_lookup_decoding_lm](./samples/cpp/prompt_lookup_decoding_lm/README.md)
 8. [speculative_decoding_lm](./samples/cpp/speculative_decoding_lm/README.md)
-3. [Stable Diffuison and Latent Consistency Model (with LoRA) C++ image generation pipeline](./samples/cpp/stable_diffusion/README.md)
+3. [Stable Diffuison and Latent Consistency Model (with LoRA) C++ image generation pipeline](./samples/cpp/text2image/README.md)
 
 ### Requirements
