diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 867a589ce..3c9804d43 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -687,8 +687,8 @@ jobs:
     strategy:
       matrix:
         include:
-          - build: 'cpu-x64'
-            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF'
+          - build: 'cpu-x64 (static)'
+            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DBUILD_SHARED_LIBS=OFF'
           - build: 'openblas-x64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"'
           - build: 'vulkan-x64'
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 83f7d1a45..2f7bad2cf 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -104,8 +104,8 @@ if (LLAMA_LLGUIDANCE)
     llama_build_and_test(test-grammar-llguidance.cpp ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-bpe.gguf)
 endif ()
 
-if (NOT WIN32)
-    # these tests are disabled on Windows because they use internal functions not exported with LLAMA_API
+if (NOT WIN32 OR NOT BUILD_SHARED_LIBS)
+    # these tests are disabled on Windows because they use internal functions not exported with LLAMA_API (when building with shared libraries)
     llama_build_and_test(test-sampling.cpp)
     llama_build_and_test(test-grammar-parser.cpp)
     llama_build_and_test(test-grammar-integration.cpp)
diff --git a/tests/test-chat.cpp b/tests/test-chat.cpp
index c6d998f10..6ebf1464d 100644
--- a/tests/test-chat.cpp
+++ b/tests/test-chat.cpp
@@ -7,6 +7,8 @@
 //
 #include "chat.h"
 
+#include "log.h"
+
 #include "../src/unicode.h"
 #include "../src/llama-grammar.h"
 
@@ -1428,6 +1430,8 @@ static void test_msg_diffs_compute() {
 }
 
 int main(int argc, char ** argv) {
+    common_log_set_verbosity_thold(999);
+
     // try {
 #ifndef _WIN32
     if (argc > 1) {