diff --git a/.github/workflows/causal_lm_cpp.yml b/.github/workflows/causal_lm_cpp.yml
index 2e9d72e263..0c12d2188c 100644
--- a/.github/workflows/causal_lm_cpp.yml
+++ b/.github/workflows/causal_lm_cpp.yml
@@ -53,7 +53,7 @@ jobs:
           wget https://huggingface.co/smangrul/tinyllama_lora_sql/resolve/main/adapter_model.safetensors?download=true -O adapter_model.safetensors
       - run: >
           . ./ov/setupvars.sh
-          && timeout 25s ./build/samples/cpp/multinomial_causal_lm/multinomial_causal_lm ./open_llama_3b_v2/ a
+          && timeout 25s ./build/samples/cpp/text_generation/multinomial_causal_lm ./open_llama_3b_v2/ a
        env:
          PYTHONPATH: "./build"
      - run: >
@@ -78,7 +78,7 @@ jobs:
       matrix:
         executable:
           [
-            ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm,
+            ./build/samples/cpp/text_generation/beam_search_causal_lm,
             python ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py,
           ]
     runs-on: ubuntu-20.04
@@ -338,7 +338,7 @@ jobs:
           optimum-cli export openvino --trust-remote-code --weight-format fp16 --model Qwen/Qwen1.5-7B-Chat Qwen1.5-7B-Chat
       - run: >
           . ./ov/setupvars.sh
-          && timeout 50s ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm ./Qwen1.5-7B-Chat/ "你好!"
+          && timeout 50s ./build/samples/cpp/text_generation/beam_search_causal_lm ./Qwen1.5-7B-Chat/ "你好!"
           | diff <(timeout 50s ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py ./Qwen1.5-7B-Chat/ "你好!") -
        env:
          PYTHONPATH: "./build"
@@ -373,7 +373,7 @@ jobs:
          optimum-cli export openvino --trust-remote-code --weight-format fp16 --model microsoft/phi-2 phi-2
      - run: >
          . ./ov/setupvars.sh
-          && timeout 50s ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm ./phi-2/ 69
+          && timeout 50s ./build/samples/cpp/text_generation/beam_search_causal_lm ./phi-2/ 69
          | diff <(timeout 50s ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py ./phi-2/ 69) -
        env:
          PYTHONPATH: "./build"
@@ -408,7 +408,7 @@ jobs:
          optimum-cli export openvino --trust-remote-code --weight-format fp16 --model argilla/notus-7b-v1 notus-7b-v1
      - run: >
          . ./ov/setupvars.sh
-          && timeout 50s ./build/samples/cpp/beam_search_causal_lm/beam_search_causal_lm ./notus-7b-v1/ 69
+          && timeout 50s ./build/samples/cpp/text_generation/beam_search_causal_lm ./notus-7b-v1/ 69
          | diff <(timeout 50s ./samples/python/beam_search_causal_lm/beam_search_causal_lm.py ./notus-7b-v1/ 69) -
        env:
          PYTHONPATH: "./build"
@@ -445,7 +445,7 @@ jobs:
      - name: run and compare
        run: |
          source ./ov/setupvars.sh
-          ./build/samples/cpp/speculative_decoding_lm/speculative_decoding_lm ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_speculative.txt
+          ./build/samples/cpp/text_generation/speculative_decoding_lm ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_speculative.txt
          ./build/samples/cpp/text_generation/greedy_causal_lm ./dolly-v2-7b/ "Alan Turing was a" > predictions_greedy.txt
          python ./samples/python/speculative_decoding_lm/speculative_decoding_lm.py ./dolly-v2-7b/ ./dolly-v2-3b/ "Alan Turing was a" > predictions_py.txt
          python -c "
@@ -502,7 +502,7 @@ jobs:
          Question: Can you please add 2 and 3
          A:' > ./prompt.txt
-          ./build/samples/cpp/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_prompt_lookup.txt
+          ./build/samples/cpp/text_generation/prompt_lookup_decoding_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_prompt_lookup.txt
          ./build/samples/cpp/text_generation/greedy_causal_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_greedy.txt
          python ./samples/python/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm.py ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_py.txt
          python -c "
@@ -664,7 +664,7 @@ jobs:
        run: |
          source ./ov/setupvars.sh
          printf 'What is 2 + 2?\nWhat is the previous answer?\nAdd 1 to it.\nSubtract 5 from it.\nWhy is the sun yellow?\nWhat was my first question?\n' > ./input.txt
-          timeout 30s ./build/samples/cpp/chat_sample/chat_sample ./TinyLlama-1.1B-Chat-v1.0/ < input.txt > ./pred.txt
+          timeout 30s ./build/samples/cpp/text_generation/chat_sample ./TinyLlama-1.1B-Chat-v1.0/ < input.txt > ./pred.txt
          python -c "
          from transformers import AutoTokenizer, AutoModelForCausalLM
          model_id = 'TinyLlama/TinyLlama-1.1B-Chat-v1.0'
diff --git a/samples/CMakeLists.txt b/samples/CMakeLists.txt
index 02539df6e7..619eebf0ef 100644
--- a/samples/CMakeLists.txt
+++ b/samples/CMakeLists.txt
@@ -2,14 +2,8 @@
 # SPDX-License-Identifier: Apache-2.0
 #
 
-add_subdirectory(cpp/beam_search_causal_lm)
 add_subdirectory(cpp/benchmark_genai)
-add_subdirectory(cpp/chat_sample)
 add_subdirectory(cpp/text_generation)
-add_subdirectory(cpp/lora_greedy_causal_lm)
-add_subdirectory(cpp/multinomial_causal_lm)
-add_subdirectory(cpp/prompt_lookup_decoding_lm)
-add_subdirectory(cpp/speculative_decoding_lm)
 add_subdirectory(cpp/image_generation)
 add_subdirectory(cpp/visual_language_chat)
 add_subdirectory(cpp/whisper_speech_recognition)
@@ -22,15 +16,9 @@ install(FILES
    COMPONENT cpp_samples_genai)
 
 install(DIRECTORY
-    cpp/beam_search_causal_lm
    cpp/benchmark_genai
-    cpp/chat_sample
    cpp/text_generation
    cpp/image_generation
-    cpp/lora_greedy_causal_lm
-    cpp/multinomial_causal_lm
-    # Don't install prompt_lookup_decoding_lm because it doesn't use openvino_genai library and is not verified yet.
-    cpp/speculative_decoding_lm
    cpp/visual_language_chat
    cpp/whisper_speech_recognition
    DESTINATION samples/cpp COMPONENT cpp_samples_genai)
diff --git a/samples/cpp/beam_search_causal_lm/CMakeLists.txt b/samples/cpp/beam_search_causal_lm/CMakeLists.txt
deleted file mode 100644
index 9bf1a8aac8..0000000000
--- a/samples/cpp/beam_search_causal_lm/CMakeLists.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (C) 2023-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-find_package(OpenVINOGenAI REQUIRED
-    HINTS
-    "${CMAKE_BINARY_DIR}" # Reuse the package from the build.
-    ${OpenVINO_DIR} # GenAI may be installed alogside OpenVINO.
-    NO_CMAKE_FIND_ROOT_PATH
-)
-
-add_executable(beam_search_causal_lm beam_search_causal_lm.cpp)
-target_link_libraries(beam_search_causal_lm PRIVATE openvino::genai)
-set_target_properties(beam_search_causal_lm PROPERTIES
-    COMPILE_PDB_NAME beam_search_causal_lm
-    # Ensure out of box LC_RPATH on macOS with SIP
-    INSTALL_RPATH_USE_LINK_PATH ON)
-target_compile_features(beam_search_causal_lm PRIVATE cxx_std_11)
-
-install(TARGETS beam_search_causal_lm
-    RUNTIME DESTINATION samples_bin/
-    COMPONENT samples_bin
-    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/chat_sample/CMakeLists.txt b/samples/cpp/chat_sample/CMakeLists.txt
deleted file mode 100644
index 69578dc86c..0000000000
--- a/samples/cpp/chat_sample/CMakeLists.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (C) 2023-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-find_package(OpenVINOGenAI REQUIRED
-    PATHS
-    "${CMAKE_BINARY_DIR}" # Reuse the package from the build.
-    ${OpenVINO_DIR} # GenAI may be installed alogside OpenVINO.
-    NO_CMAKE_FIND_ROOT_PATH
-)
-
-add_executable(chat_sample chat_sample.cpp)
-target_link_libraries(chat_sample PRIVATE openvino::genai)
-set_target_properties(chat_sample PROPERTIES
-    COMPILE_PDB_NAME chat_sample
-    # Ensure out of box LC_RPATH on macOS with SIP
-    INSTALL_RPATH_USE_LINK_PATH ON)
-target_compile_features(chat_sample PRIVATE cxx_std_11)
-
-install(TARGETS chat_sample
-    RUNTIME DESTINATION samples_bin/
-    COMPONENT samples_bin
-    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/lora_greedy_causal_lm/CMakeLists.txt b/samples/cpp/lora_greedy_causal_lm/CMakeLists.txt
deleted file mode 100644
index 1d3f6307c0..0000000000
--- a/samples/cpp/lora_greedy_causal_lm/CMakeLists.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (C) 2023-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-find_package(OpenVINOGenAI REQUIRED PATHS
-    "${CMAKE_BINARY_DIR}" # Reuse the package from the build.
-    ${OpenVINO_DIR} # GenAI may be installed alogside OpenVINO.
-    NO_CMAKE_FIND_ROOT_PATH
-)
-add_executable(lora_greedy_causal_lm lora_greedy_causal_lm.cpp)
-target_link_libraries(lora_greedy_causal_lm PRIVATE openvino::genai)
-set_target_properties(lora_greedy_causal_lm PROPERTIES
-    COMPILE_PDB_NAME lora_greedy_causal_lm
-    # Ensure out of box LC_RPATH on macOS with SIP
-    INSTALL_RPATH_USE_LINK_PATH ON)
-target_compile_features(lora_greedy_causal_lm PRIVATE cxx_std_11)
-install(TARGETS lora_greedy_causal_lm
-    RUNTIME DESTINATION samples_bin/
-    COMPONENT samples_bin
-    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/multinomial_causal_lm/CMakeLists.txt b/samples/cpp/multinomial_causal_lm/CMakeLists.txt
deleted file mode 100644
index 83b2335431..0000000000
--- a/samples/cpp/multinomial_causal_lm/CMakeLists.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (C) 2023-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-find_package(OpenVINOGenAI REQUIRED
-    PATHS
-    "${CMAKE_BINARY_DIR}" # Reuse the package from the build.
-    ${OpenVINO_DIR} # GenAI may be installed alogside OpenVINO.
-    NO_CMAKE_FIND_ROOT_PATH
-)
-
-add_executable(multinomial_causal_lm multinomial_causal_lm.cpp)
-target_link_libraries(multinomial_causal_lm PRIVATE openvino::genai)
-set_target_properties(multinomial_causal_lm PROPERTIES
-    COMPILE_PDB_NAME multinomial_causal_lm
-    # Ensure out of box LC_RPATH on macOS with SIP
-    INSTALL_RPATH_USE_LINK_PATH ON)
-target_compile_features(multinomial_causal_lm PRIVATE cxx_std_11)
-
-install(TARGETS multinomial_causal_lm
-    RUNTIME DESTINATION samples_bin/
-    COMPONENT samples_bin
-    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/prompt_lookup_decoding_lm/CMakeLists.txt b/samples/cpp/prompt_lookup_decoding_lm/CMakeLists.txt
deleted file mode 100644
index b0ce8b1b60..0000000000
--- a/samples/cpp/prompt_lookup_decoding_lm/CMakeLists.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (C) 2023-2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-find_package(OpenVINOGenAI REQUIRED
-    PATHS
-    "${CMAKE_BINARY_DIR}" # Reuse the package from the build.
-    ${OpenVINO_DIR} # GenAI may be installed alogside OpenVINO.
-    NO_CMAKE_FIND_ROOT_PATH
-)
-
-set(TARGET_NAME prompt_lookup_decoding_lm)
-add_executable(${TARGET_NAME} ${TARGET_NAME}.cpp)
-target_link_libraries(${TARGET_NAME} PRIVATE openvino::genai)
-
-set_target_properties(${TARGET_NAME} PROPERTIES
-    COMPILE_PDB_NAME ${TARGET_NAME}
-    # Ensure out of box LC_RPATH on macOS with SIP
-    INSTALL_RPATH_USE_LINK_PATH ON)
-
-install(TARGETS ${TARGET_NAME}
-    RUNTIME DESTINATION samples_bin/
-    COMPONENT samples_bin
-    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/speculative_decoding_lm/CMakeLists.txt b/samples/cpp/speculative_decoding_lm/CMakeLists.txt
deleted file mode 100644
index 7c48b6cc0b..0000000000
--- a/samples/cpp/speculative_decoding_lm/CMakeLists.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (C) 2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-find_package(OpenVINOGenAI REQUIRED
-    PATHS
-    "${CMAKE_BINARY_DIR}" # Reuse the package from the build.
-    ${OpenVINO_DIR} # GenAI may be installed alogside OpenVINO.
-    NO_CMAKE_FIND_ROOT_PATH
-)
-
-set(TARGET_NAME speculative_decoding_lm)
-add_executable(${TARGET_NAME} ${TARGET_NAME}.cpp)
-target_link_libraries(${TARGET_NAME} PRIVATE openvino::genai)
-
-set_target_properties(${TARGET_NAME} PROPERTIES
-    COMPILE_PDB_NAME ${TARGET_NAME}
-    # Ensure out of box LC_RPATH on macOS with SIP
-    INSTALL_RPATH_USE_LINK_PATH ON)
-
-install(TARGETS ${TARGET_NAME}
-    RUNTIME DESTINATION samples_bin/
-    COMPONENT samples_bin
-    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/text_generation/CMakeLists.txt b/samples/cpp/text_generation/CMakeLists.txt
index 377682974e..d262e57e2f 100644
--- a/samples/cpp/text_generation/CMakeLists.txt
+++ b/samples/cpp/text_generation/CMakeLists.txt
@@ -8,6 +8,8 @@ find_package(OpenVINOGenAI REQUIRED
     NO_CMAKE_FIND_ROOT_PATH
 )
 
+
+# greedy_causal_lm
 add_executable(greedy_causal_lm greedy_causal_lm.cpp)
 target_link_libraries(greedy_causal_lm PRIVATE openvino::genai)
 set_target_properties(greedy_causal_lm PROPERTIES
@@ -15,12 +17,13 @@ set_target_properties(greedy_causal_lm PROPERTIES
    # Ensure out of box LC_RPATH on macOS with SIP
    INSTALL_RPATH_USE_LINK_PATH ON)
 target_compile_features(greedy_causal_lm PRIVATE cxx_std_11)
-
 install(TARGETS greedy_causal_lm
    RUNTIME DESTINATION samples_bin/
    COMPONENT samples_bin
    EXCLUDE_FROM_ALL)
 
+
+# encrypted_model_causal_lm
 add_executable(encrypted_model_causal_lm encrypted_model_causal_lm.cpp)
 target_link_libraries(encrypted_model_causal_lm PRIVATE openvino::genai)
 set_target_properties(encrypted_model_causal_lm PROPERTIES
@@ -28,8 +31,94 @@ set_target_properties(encrypted_model_causal_lm PROPERTIES
    # Ensure out of box LC_RPATH on macOS with SIP
    INSTALL_RPATH_USE_LINK_PATH ON)
 target_compile_features(encrypted_model_causal_lm PRIVATE cxx_std_11)
-
 install(TARGETS encrypted_model_causal_lm
    RUNTIME DESTINATION samples_bin/
    COMPONENT samples_bin
    EXCLUDE_FROM_ALL)
+
+
+# beam_search_causal_lm
+add_executable(beam_search_causal_lm beam_search_causal_lm.cpp)
+target_link_libraries(beam_search_causal_lm PRIVATE openvino::genai)
+set_target_properties(beam_search_causal_lm PROPERTIES
+    COMPILE_PDB_NAME beam_search_causal_lm
+    # Ensure out of box LC_RPATH on macOS with SIP
+    INSTALL_RPATH_USE_LINK_PATH ON)
+target_compile_features(beam_search_causal_lm PRIVATE cxx_std_11)
+install(TARGETS beam_search_causal_lm
+    RUNTIME DESTINATION samples_bin/
+    COMPONENT samples_bin
+    EXCLUDE_FROM_ALL)
+
+
+# chat_sample
+add_executable(chat_sample chat_sample.cpp)
+target_link_libraries(chat_sample PRIVATE openvino::genai)
+set_target_properties(chat_sample PROPERTIES
+    COMPILE_PDB_NAME chat_sample
+    # Ensure out of box LC_RPATH on macOS with SIP
+    INSTALL_RPATH_USE_LINK_PATH ON)
+target_compile_features(chat_sample PRIVATE cxx_std_11)
+install(TARGETS chat_sample
+    RUNTIME DESTINATION samples_bin/
+    COMPONENT samples_bin
+    EXCLUDE_FROM_ALL)
+
+
+# lora_greedy_causal_lm
+add_executable(lora_greedy_causal_lm lora_greedy_causal_lm.cpp)
+target_link_libraries(lora_greedy_causal_lm PRIVATE openvino::genai)
+set_target_properties(lora_greedy_causal_lm PROPERTIES
+    COMPILE_PDB_NAME lora_greedy_causal_lm
+    # Ensure out of box LC_RPATH on macOS with SIP
+    INSTALL_RPATH_USE_LINK_PATH ON)
+target_compile_features(lora_greedy_causal_lm PRIVATE cxx_std_11)
+install(TARGETS lora_greedy_causal_lm
+    RUNTIME DESTINATION samples_bin/
+    COMPONENT samples_bin
+    EXCLUDE_FROM_ALL)
+
+
+# multinomial_causal_lm
+add_executable(multinomial_causal_lm multinomial_causal_lm.cpp)
+target_link_libraries(multinomial_causal_lm PRIVATE openvino::genai)
+set_target_properties(multinomial_causal_lm PROPERTIES
+    COMPILE_PDB_NAME multinomial_causal_lm
+    # Ensure out of box LC_RPATH on macOS with SIP
+    INSTALL_RPATH_USE_LINK_PATH ON)
+target_compile_features(multinomial_causal_lm PRIVATE cxx_std_11)
+
+install(TARGETS multinomial_causal_lm
+    RUNTIME DESTINATION samples_bin/
+    COMPONENT samples_bin
+    EXCLUDE_FROM_ALL)
+
+
+# prompt_lookup_decoding_lm
+add_executable(prompt_lookup_decoding_lm prompt_lookup_decoding_lm.cpp)
+target_link_libraries(prompt_lookup_decoding_lm PRIVATE openvino::genai)
+
+set_target_properties(prompt_lookup_decoding_lm PROPERTIES
+    COMPILE_PDB_NAME prompt_lookup_decoding_lm
+    # Ensure out of box LC_RPATH on macOS with SIP
+    INSTALL_RPATH_USE_LINK_PATH ON)
+# Don't install prompt_lookup_decoding_lm because it doesn't use openvino_genai library and is not verified yet.
+# install(TARGETS prompt_lookup_decoding_lm
+#     RUNTIME DESTINATION samples_bin/
+#     COMPONENT samples_bin
+#     EXCLUDE_FROM_ALL)
+
+
+# speculative_decoding_lm
+add_executable(speculative_decoding_lm speculative_decoding_lm.cpp)
+target_link_libraries(speculative_decoding_lm PRIVATE openvino::genai)
+
+set_target_properties(speculative_decoding_lm PROPERTIES
+    COMPILE_PDB_NAME speculative_decoding_lm
+    # Ensure out of box LC_RPATH on macOS with SIP
+    INSTALL_RPATH_USE_LINK_PATH ON)
+
+install(TARGETS speculative_decoding_lm
+    RUNTIME DESTINATION samples_bin/
+    COMPONENT samples_bin
+    EXCLUDE_FROM_ALL)
diff --git a/samples/cpp/beam_search_causal_lm/beam_search_causal_lm.cpp b/samples/cpp/text_generation/beam_search_causal_lm.cpp
similarity index 100%
rename from samples/cpp/beam_search_causal_lm/beam_search_causal_lm.cpp
rename to samples/cpp/text_generation/beam_search_causal_lm.cpp
diff --git a/samples/cpp/chat_sample/chat_sample.cpp b/samples/cpp/text_generation/chat_sample.cpp
similarity index 100%
rename from samples/cpp/chat_sample/chat_sample.cpp
rename to samples/cpp/text_generation/chat_sample.cpp
diff --git a/samples/cpp/lora_greedy_causal_lm/lora_greedy_causal_lm.cpp b/samples/cpp/text_generation/lora_greedy_causal_lm.cpp
similarity index 100%
rename from samples/cpp/lora_greedy_causal_lm/lora_greedy_causal_lm.cpp
rename to samples/cpp/text_generation/lora_greedy_causal_lm.cpp
diff --git a/samples/cpp/multinomial_causal_lm/multinomial_causal_lm.cpp b/samples/cpp/text_generation/multinomial_causal_lm.cpp
similarity index 100%
rename from samples/cpp/multinomial_causal_lm/multinomial_causal_lm.cpp
rename to samples/cpp/text_generation/multinomial_causal_lm.cpp
diff --git a/samples/cpp/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm.cpp b/samples/cpp/text_generation/prompt_lookup_decoding_lm.cpp
similarity index 100%
rename from samples/cpp/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm.cpp
rename to samples/cpp/text_generation/prompt_lookup_decoding_lm.cpp
diff --git a/samples/cpp/speculative_decoding_lm/speculative_decoding_lm.cpp b/samples/cpp/text_generation/speculative_decoding_lm.cpp
similarity index 100%
rename from samples/cpp/speculative_decoding_lm/speculative_decoding_lm.cpp
rename to samples/cpp/text_generation/speculative_decoding_lm.cpp
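
Note: with all text-generation samples consolidated under samples/cpp/text_generation, a future sample added to that directory would follow the same per-target pattern as the blocks introduced above and would reuse the single find_package(OpenVINOGenAI ...) call already at the top of that CMakeLists.txt. A minimal sketch is shown below; the target name my_text_generation_sample and its source file are hypothetical, used only to illustrate the pattern, and are not part of this change.

# my_text_generation_sample (hypothetical target, illustrating the per-target pattern used above)
add_executable(my_text_generation_sample my_text_generation_sample.cpp)
target_link_libraries(my_text_generation_sample PRIVATE openvino::genai)
set_target_properties(my_text_generation_sample PROPERTIES
    COMPILE_PDB_NAME my_text_generation_sample
    # Ensure out of box LC_RPATH on macOS with SIP
    INSTALL_RPATH_USE_LINK_PATH ON)
target_compile_features(my_text_generation_sample PRIVATE cxx_std_11)
install(TARGETS my_text_generation_sample
    RUNTIME DESTINATION samples_bin/
    COMPONENT samples_bin
    EXCLUDE_FROM_ALL)

Any CI invocation of such a sample would then point at ./build/samples/cpp/text_generation/, as the workflow changes above already do for the relocated samples.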