Skip to content

Commit 5b00440

Browse files
committed
4 cores
1 parent 3d9886b commit 5b00440

File tree

5 files changed

+18
-15
lines changed

5 files changed

+18
-15
lines changed

.github/workflows/causal_lm_cpp.yml

+6-4
Original file line numberDiff line numberDiff line change
@@ -684,23 +684,23 @@ jobs:
684684
visual_language_sample:
685685
strategy:
686686
fail-fast: false
687-
matrix: {runs-on: [ubuntu-20.04-16-core, macos-12]}
687+
matrix: {runs-on: [ubuntu-20.04-4-cores, macos-12]}
688688
runs-on: ${{ matrix.runs-on }}
689689
steps:
690690
- uses: actions/checkout@v4
691691
with: {submodules: recursive}
692692
- uses: actions/setup-python@v4
693693
with: {python-version: 3.12}
694694
- run: mkdir ./ov/
695-
- if: ${{ 'ubuntu-20.04-16-core' == matrix.runs-on }}
695+
- if: ${{ 'ubuntu-20.04-4-cores' == matrix.runs-on }}
696696
run: >
697697
curl ${{ env.l_ov_link }} | tar --directory ./ov/ --strip-components 1 -xz
698698
&& sudo ./ov/install_dependencies/install_openvino_dependencies.sh
699699
- if: ${{ 'macos-12' == matrix.runs-on }}
700700
run: >
701701
curl ${{ env.m_ov_link }} | tar --directory ./ov/ --strip-components 1 -xz
702702
&& brew install coreutils scons
703-
- run: OpenVINO_DIR=./ov/runtime/cmake/ cmake -DCMAKE_BUILD_TYPE=Release -B ./build/ ./
703+
- run: cmake -DOpenVINO_DIR=./ov/runtime/cmake/ -DCMAKE_BUILD_TYPE=Release -B ./build/ ./
704704
- run: cmake --build ./build/ --config Release --target visual_language_chat -j
705705
- name: Download and convert a model and an image
706706
run: |
@@ -710,8 +710,9 @@ jobs:
710710
python ./samples/cpp/visual_language_chat/export_MiniCPM-V-2_6.py ./miniCPM-V-2_6/
711711
wget https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11
712712
- run: >
713-
timeout 2m ./build/samples/cpp/visual_language_chat/visual_language_chat ./miniCPM-V-2_6/ d5fbbd1a-d484-415c-88cb-9986625b7b11
713+
./build/samples/cpp/visual_language_chat/visual_language_chat ./miniCPM-V-2_6/ d5fbbd1a-d484-415c-88cb-9986625b7b11
714714
<<< $'What is on the image?\nWhat is special on the image?'
715+
timeout-minutes: 2
715716
716717
cpp-continuous-batching-ubuntu:
717718
runs-on: ubuntu-20.04-8-cores
@@ -855,6 +856,7 @@ jobs:
855856
cpp-beam_search_causal_lm-Qwen-7B-Chat, cpp-beam_search_causal_lm-Qwen1_5-7B-Chat, cpp-beam_search_causal_lm-Phi-2,
856857
cpp-beam_search_causal_lm-notus-7b-v1, cpp-speculative_decoding_lm-ubuntu, cpp-prompt_lookup_decoding_lm-ubuntu,
857858
cpp-Phi-1_5, cpp-greedy_causal_lm-redpajama-3b-chat, cpp-chat_sample-ubuntu, cpp-continuous-batching-ubuntu,
859+
visual_language_sample,
858860
cpp-continuous-batching-windows, cpp-continuous-batching-macos]
859861
if: ${{ always() }}
860862
runs-on: ubuntu-latest

samples/CMakeLists.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ install(DIRECTORY
2525
cpp/greedy_causal_lm
2626
cpp/multinomial_causal_lm
2727
# Don't install prompt_lookup_decoding_lm and speculative_decoding_lm because they don't use openvino_genai library and aren't verified yet.
28-
# Don't install continuous_batching_accuracy and continuous_batching_benchmark because they depend on json.
28+
# Don't install continuous_batching_accuracy and continuous_batching_benchmark because CB isn't ready.
2929
cpp/visual_language_chat
3030
cpp/whisper_speech_recognition
3131
cpp/stable_diffusion

samples/cpp/visual_language_chat/CMakeLists.txt

+7-5
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
# Copyright (C) 2023-2024 Intel Corporation
22
# SPDX-License-Identifier: Apache-2.0
33

4-
find_package(OpenVINOGenAI REQUIRED PATHS
5-
"${CMAKE_BINARY_DIR}" # Reuse the package from the build.
6-
${OpenVINO_DIR} # GenAI may be installed alongside OpenVINO.
4+
find_package(OpenVINOGenAI REQUIRED
5+
PATHS
6+
"${CMAKE_BINARY_DIR}" # Reuse the package from the build.
7+
${OpenVINO_DIR} # GenAI may be installed alongside OpenVINO.
8+
NO_CMAKE_FIND_ROOT_PATH
79
)
810

911
file(DOWNLOAD
@@ -14,11 +16,11 @@ file(DOWNLOAD
1416
add_executable(visual_language_chat visual_language_chat.cpp load_image.cpp)
1517
target_include_directories(visual_language_chat PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_BINARY_DIR}")
1618
target_link_libraries(visual_language_chat PRIVATE openvino::genai)
19+
1720
set_target_properties(visual_language_chat PROPERTIES
18-
COMPILE_PDB_NAME chat_sample
21+
COMPILE_PDB_NAME visual_language_chat
1922
# Ensure out of box LC_RPATH on macOS with SIP
2023
INSTALL_RPATH_USE_LINK_PATH ON)
21-
target_compile_features(visual_language_chat PRIVATE cxx_std_11)
2224

2325
install(TARGETS visual_language_chat
2426
RUNTIME DESTINATION samples_bin/

samples/cpp/visual_language_chat/export_MiniCPM-V-2_6.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -958,8 +958,8 @@ def main():
958958
gc.collect()
959959

960960
convert_vision_encoder(model, model_dir)
961-
ov_cpm = init_model(model_dir, "CPU")
962-
print(ov_cpm.chat(Image.open(requests.get("https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11", stream=True).raw), [{"role": "user", "content": "What is unusual on this image?"}], ov_cpm.processor.tokenizer))
961+
# ov_cpm = init_model(model_dir, "CPU")
962+
# print(ov_cpm.chat(Image.open(requests.get("https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11", stream=True).raw), [{"role": "user", "content": "What is unusual on this image?"}], ov_cpm.processor.tokenizer))
963963

964964
if "__main__" == __name__:
965965
main()

samples/cpp/visual_language_chat/visual_language_chat.cpp

+2-3
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,10 @@ bool print_subword(std::string&& subword) {
1010
}
1111

1212
int main(int argc, char* argv[]) {
13-
if (4 != argc) {
13+
if (3 != argc) {
1414
throw std::runtime_error(std::string{"Usage "} + argv[0] + " <MODEL_DIR> <IMAGE_FILE>");
1515
}
1616
ov::Tensor image = utils::load_image(argv[2]);
17-
ov::Tensor image2 = utils::load_image(argv[3]);
1817
std::string device = "CPU"; // GPU can be used as well
1918
ov::AnyMap enable_compile_cache;
2019
if ("GPU" == device) {
@@ -32,7 +31,7 @@ int main(int argc, char* argv[]) {
3231
}
3332
pipe.generate(
3433
prompt,
35-
ov::genai::images(std::vector{image2, image}),
34+
ov::genai::image(std::move(image)),
3635
ov::genai::streamer(print_subword)
3736
);
3837
std::cout << "\n----------\n"

0 commit comments

Comments
 (0)