Skip to content

Commit 78a4ee9

Browse files
authored
[Samples] merge LLM samples to "text_generation" folder (#1411)
1 parent 250a231 commit 78a4ee9

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

41 files changed: +303 −795 lines changed

.github/workflows/causal_lm_cpp.yml

+16-16
Original file line number | Diff line number | Diff line change
@@ -53,12 +53,12 @@ jobs:
5353
wget https://huggingface.co/smangrul/tinyllama_lora_sql/resolve/main/adapter_model.safetensors?download=true -O adapter_model.safetensors
5454
- run: >
5555
. ./ov/setupvars.sh
56-
&& timeout 35s ./build/samples/cpp/multinomial_causal_lm/multinomial_causal_lm ./open_llama_3b_v2/ a
56+
&& timeout 35s ./build/samples/cpp/text_generation/multinomial_causal_lm ./open_llama_3b_v2/ a
5757
env:
5858
PYTHONPATH: "./build"
5959
- run: >
6060
. ./ov/setupvars.sh
61-
&& timeout 35s ./samples/python/multinomial_causal_lm/multinomial_causal_lm.py ./open_llama_3b_v2/ b
61+
&& timeout 35s ./samples/python/text_generation/multinomial_causal_lm.py ./open_llama_3b_v2/ b
6262
env:
6363
PYTHONPATH: "./build"
6464
- run: >
@@ -78,8 +78,8 @@ jobs:
7878
matrix:
7979
executable:
8080
[
81-
./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm,
82-
python ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py,
81+
./build/samples/cpp/text_generation/beam_search_causal_lm,
82+
python ./samples/python/text_generation/beam_search_causal_lm.py,
8383
]
8484
runs-on: ubuntu-20.04
8585
defaults:
@@ -338,8 +338,8 @@ jobs:
338338
optimum-cli export openvino --trust-remote-code --weight-format fp16 --model Qwen/Qwen1.5-7B-Chat Qwen1.5-7B-Chat
339339
- run: >
340340
. ./ov/setupvars.sh
341-
&& timeout 50s ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm ./Qwen1.5-7B-Chat/ "你好!"
342-
| diff <(timeout 50s ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py ./Qwen1.5-7B-Chat/ "你好!") -
341+
&& timeout 50s ./build/samples/cpp/text_generation/beam_search_causal_lm ./Qwen1.5-7B-Chat/ "你好!"
342+
| diff <(timeout 50s ./samples/python/text_generation/beam_search_causal_lm.py ./Qwen1.5-7B-Chat/ "你好!") -
343343
env:
344344
PYTHONPATH: "./build"
345345
@@ -373,8 +373,8 @@ jobs:
373373
optimum-cli export openvino --trust-remote-code --weight-format fp16 --model microsoft/phi-2 phi-2
374374
- run: >
375375
. ./ov/setupvars.sh
376-
&& timeout 50s ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm ./phi-2/ 69
377-
| diff <(timeout 50s ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py ./phi-2/ 69) -
376+
&& timeout 50s ./build/samples/cpp/text_generation/beam_search_causal_lm ./phi-2/ 69
377+
| diff <(timeout 50s ./samples/python/text_generation/beam_search_causal_lm.py ./phi-2/ 69) -
378378
env:
379379
PYTHONPATH: "./build"
380380
@@ -408,8 +408,8 @@ jobs:
408408
optimum-cli export openvino --trust-remote-code --weight-format fp16 --model argilla/notus-7b-v1 notus-7b-v1
409409
- run: >
410410
. ./ov/setupvars.sh
411-
&& timeout 50s ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm ./notus-7b-v1/ 69
412-
| diff <(timeout 50s ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py ./notus-7b-v1/ 69) -
411+
&& timeout 50s ./build/samples/cpp/text_generation/beam_search_causal_lm ./notus-7b-v1/ 69
412+
| diff <(timeout 50s ./samples/python/text_generation/beam_search_causal_lm.py ./notus-7b-v1/ 69) -
413413
env:
414414
PYTHONPATH: "./build"
415415
@@ -445,9 +445,9 @@ jobs:
445445
- name: run and compare
446446
run: |
447447
source ./ov/setupvars.sh
448-
./build/samples/cpp/speculative_decoding_lm/speculative_decoding_lm ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_speculative.txt
448+
./build/samples/cpp/text_generation/speculative_decoding_lm ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_speculative.txt
449449
./build/samples/cpp/text_generation/greedy_causal_lm ./dolly-v2-7b/ "Alan Turing was a" > predictions_greedy.txt
450-
python ./samples/python/speculative_decoding_lm/speculative_decoding_lm.py ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_py.txt
450+
python ./samples/python/text_generation/speculative_decoding_lm.py ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_py.txt
451451
python -c "
452452
with open('predictions_greedy.txt', 'r') as f:
453453
predicted_greedy = f.readline()
@@ -502,9 +502,9 @@ jobs:
502502
Question: Can you please add 2 and 3
503503
A:' > ./prompt.txt
504504
505-
./build/samples/cpp/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_prompt_lookup.txt
505+
./build/samples/cpp/text_generation/prompt_lookup_decoding_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_prompt_lookup.txt
506506
./build/samples/cpp/text_generation/greedy_causal_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_greedy.txt
507-
python ./samples/python/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm.py ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_py.txt
507+
python ./samples/python/text_generation/prompt_lookup_decoding_lm.py ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_py.txt
508508
python -c "
509509
with open('predictions_greedy.txt', 'r') as f:
510510
predicted_greedy = f.readline()
@@ -664,7 +664,7 @@ jobs:
664664
run: |
665665
source ./ov/setupvars.sh
666666
printf 'What is 2 + 2?\nWhat is the previous answer?\nAdd 1 to it.\nSubtract 5 from it.\nWhy is the sun yellow?\nWhat was my first question?\n' > ./input.txt
667-
timeout 30s ./build/samples/cpp/chat_sample/chat_sample ./TinyLlama-1.1B-Chat-v1.0/ < input.txt > ./pred.txt
667+
timeout 30s ./build/samples/cpp/text_generation/chat_sample ./TinyLlama-1.1B-Chat-v1.0/ < input.txt > ./pred.txt
668668
python -c "
669669
from transformers import AutoTokenizer, AutoModelForCausalLM
670670
model_id = 'TinyLlama/TinyLlama-1.1B-Chat-v1.0'
@@ -693,7 +693,7 @@ jobs:
693693
"
694694
diff pred.txt ref.txt
695695
echo "Chat sample cpp" passed
696-
timeout 30s ./samples/python/chat_sample/chat_sample.py ./TinyLlama-1.1B-Chat-v1.0/ < input.txt > ./pred2.txt
696+
timeout 30s ./samples/python/text_generation/chat_sample.py ./TinyLlama-1.1B-Chat-v1.0/ < input.txt > ./pred2.txt
697697
diff pred2.txt ref.txt
698698
echo "Chat sample python" passed
699699

.github/workflows/linux.yml

+1-1
Original file line number | Diff line number | Diff line change
@@ -374,7 +374,7 @@ jobs:
374374
- name: Test multinomial_causal_lm.py
375375
if: ${{ 'Release' == matrix.build-type }} # Python bindings can be built in Release only
376376
timeout-minutes: 1
377-
run: ${{ env.INSTALL_DIR }}/samples/python/multinomial_causal_lm/multinomial_causal_lm.py ./TinyLlama-1.1B-Chat-v1.0/ 0
377+
run: ${{ env.INSTALL_DIR }}/samples/python/text_generation/multinomial_causal_lm.py ./TinyLlama-1.1B-Chat-v1.0/ 0
378378
working-directory: ${{ env.MODELS_DIR }}
379379

380380
- name: Test whisper_speech_recognition.py

.github/workflows/mac.yml

+1-1
Original file line number | Diff line number | Diff line change
@@ -395,7 +395,7 @@ jobs:
395395
if: ${{ 'Release' == matrix.build-type }} # Python bindings can be built in Release only
396396
run: |
397397
source ${OV_INSTALL_DIR}/setupvars.sh
398-
${OV_INSTALL_DIR}/samples/python/multinomial_causal_lm/multinomial_causal_lm.py ./TinyLlama-1.1B-Chat-v1.0/ 0
398+
${OV_INSTALL_DIR}/samples/python/text_generation/multinomial_causal_lm.py ./TinyLlama-1.1B-Chat-v1.0/ 0
399399
timeout-minutes: 1
400400

401401
- name: Test python samples (whisper_speech_recognition)

.github/workflows/windows.yml

+1-1
Original file line number | Diff line number | Diff line change
@@ -470,7 +470,7 @@ jobs:
470470
if: ${{ 'Release' == matrix.build-type }} # Python bindings can be built in Release only
471471
run: |
472472
. "${{ env.OV_INSTALL_DIR }}/setupvars.ps1"
473-
python ${{ env.OV_INSTALL_DIR }}\samples\python\multinomial_causal_lm\multinomial_causal_lm.py TinyLlama-1.1B-Chat-v1.0 0
473+
python ${{ env.OV_INSTALL_DIR }}\samples\python\text_generation\multinomial_causal_lm.py TinyLlama-1.1B-Chat-v1.0 0
474474
475475
- name: Test python samples (whisper_speech_recognition)
476476
if: ${{ 'Release' == matrix.build-type }} # Python bindings can be built in Release only

samples/CMakeLists.txt

-19
Original file line number | Diff line number | Diff line change
@@ -2,14 +2,7 @@
22
# SPDX-License-Identifier: Apache-2.0
33
#
44

5-
add_subdirectory(cpp/beam_search_causal_lm)
6-
add_subdirectory(cpp/benchmark_genai)
7-
add_subdirectory(cpp/chat_sample)
85
add_subdirectory(cpp/text_generation)
9-
add_subdirectory(cpp/lora_greedy_causal_lm)
10-
add_subdirectory(cpp/multinomial_causal_lm)
11-
add_subdirectory(cpp/prompt_lookup_decoding_lm)
12-
add_subdirectory(cpp/speculative_decoding_lm)
136
add_subdirectory(cpp/image_generation)
147
add_subdirectory(cpp/visual_language_chat)
158
add_subdirectory(cpp/whisper_speech_recognition)
@@ -22,27 +15,15 @@ install(FILES
2215
COMPONENT cpp_samples_genai)
2316

2417
install(DIRECTORY
25-
cpp/beam_search_causal_lm
26-
cpp/benchmark_genai
27-
cpp/chat_sample
2818
cpp/text_generation
2919
cpp/image_generation
30-
cpp/lora_greedy_causal_lm
31-
cpp/multinomial_causal_lm
32-
# Don't install prompt_lookup_decoding_lm because it doesn't use openvino_genai library and is not verified yet.
33-
cpp/speculative_decoding_lm
3420
cpp/visual_language_chat
3521
cpp/whisper_speech_recognition
3622
DESTINATION samples/cpp COMPONENT cpp_samples_genai)
3723

3824
install(DIRECTORY
39-
python/beam_search_causal_lm
40-
python/benchmark_genai
41-
python/chat_sample
4225
python/text_generation
4326
python/image_generation
44-
python/multinomial_causal_lm
45-
python/speculative_decoding_lm
4627
python/visual_language_chat
4728
python/whisper_speech_recognition
4829
DESTINATION samples/python COMPONENT cpp_samples_genai

samples/cpp/beam_search_causal_lm/CMakeLists.txt

-22
This file was deleted.

samples/cpp/beam_search_causal_lm/README.md

-38
This file was deleted.

samples/cpp/benchmark_genai/CMakeLists.txt

-32
This file was deleted.

samples/cpp/benchmark_genai/README.md

-49
This file was deleted.

samples/cpp/chat_sample/CMakeLists.txt

-22
This file was deleted.

samples/cpp/chat_sample/README.md

-46
This file was deleted.

0 commit comments

Comments (0)