diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1193779d0bc..b96a37c111e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -79,8 +79,6 @@ jobs: cmake -B build \ -DCMAKE_BUILD_RPATH="@loader_path" \ -DLLAMA_FATAL_WARNINGS=ON \ - -DLLAMA_CURL=OFF \ - -DLLAMA_BUILD_BORINGSSL=ON \ -DGGML_METAL_USE_BF16=ON \ -DGGML_METAL_EMBED_LIBRARY=OFF \ -DGGML_METAL_SHADER_DEBUG=ON \ @@ -118,8 +116,6 @@ jobs: cmake -B build \ -DCMAKE_BUILD_RPATH="@loader_path" \ -DLLAMA_FATAL_WARNINGS=ON \ - -DLLAMA_CURL=OFF \ - -DLLAMA_BUILD_BORINGSSL=ON \ -DGGML_METAL=OFF \ -DGGML_RPC=ON \ -DCMAKE_OSX_DEPLOYMENT_TARGET=13.3 @@ -1042,8 +1038,7 @@ jobs: - name: Build id: cmake_build run: | - cmake -S . -B build ${{ matrix.defines }} ` - -DLLAMA_CURL=OFF -DLLAMA_BUILD_BORINGSSL=ON + cmake -S . -B build ${{ matrix.defines }} cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} - name: Add libopenblas.dll @@ -1150,8 +1145,6 @@ jobs: call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x64 cmake -S . 
-B build -G "Ninja Multi-Config" ^ -DLLAMA_BUILD_SERVER=ON ^ - -DLLAMA_CURL=OFF ^ - -DLLAMA_BUILD_BORINGSSL=ON ^ -DGGML_NATIVE=OFF ^ -DGGML_BACKEND_DL=ON ^ -DGGML_CPU_ALL_VARIANTS=ON ^ @@ -1258,8 +1251,6 @@ jobs: -DCMAKE_CXX_COMPILER="${env:HIP_PATH}\bin\clang++.exe" ` -DCMAKE_CXX_FLAGS="-I$($PWD.Path.Replace('\', '/'))/opt/rocm-${{ env.ROCM_VERSION }}/include/" ` -DCMAKE_BUILD_TYPE=Release ` - -DLLAMA_CURL=OFF ` - -DLLAMA_BUILD_BORINGSSL=ON ` -DROCM_DIR="${env:HIP_PATH}" ` -DGGML_HIP=ON ` -DGGML_HIP_ROCWMMA_FATTN=ON ` diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 5694feb2c93..0da882ff3a9 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -72,7 +72,7 @@ jobs: - name: Build id: cmake_build run: | - cmake -B build -DLLAMA_CURL=OFF -DLLAMA_BUILD_BORINGSSL=ON + cmake -B build cmake --build build --config ${{ matrix.build_type }} -j ${env:NUMBER_OF_PROCESSORS} --target llama-server - name: Python setup @@ -108,7 +108,7 @@ jobs: - name: Build id: cmake_build run: | - cmake -B build -DLLAMA_CURL=OFF -DLLAMA_BUILD_BORINGSSL=ON + cmake -B build cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} --target llama-server - name: Python setup diff --git a/CMakeLists.txt b/CMakeLists.txt index c231ec0e3fa..d7fd41110c9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -111,10 +111,13 @@ option(LLAMA_BUILD_SERVER "llama: build server example" ${LLAMA_STANDALONE}) option(LLAMA_TOOLS_INSTALL "llama: install tools" ${LLAMA_TOOLS_INSTALL_DEFAULT}) # 3rd party libs -option(LLAMA_CURL "llama: use libcurl to download model from an URL" ON) -option(LLAMA_HTTPLIB "llama: if libcurl is disabled, use httplib to download model from an URL" ON) -option(LLAMA_OPENSSL "llama: use openssl to support HTTPS" OFF) -option(LLAMA_LLGUIDANCE "llama-common: include LLGuidance library for structured output in common utils" OFF) +option(LLAMA_CURL "llama: use libcurl to download model from an URL" OFF) +option(LLAMA_HTTPLIB 
"llama: if libcurl is disabled, use httplib to download model from an URL" ON) +option(LLAMA_HTTPLIB_SSL "llama: build httplib with SSL support" ON) +option(LLAMA_BORINGSSL "llama: use boringssl to support HTTPS" ON) +option(LLAMA_LIBRESSL "llama: use libressl to support HTTPS" OFF) +option(LLAMA_OPENSSL "llama: use openssl to support HTTPS" OFF) +option(LLAMA_LLGUIDANCE "llama-common: include LLGuidance library for structured output in common utils" OFF) # Required for relocatable CMake package include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/build-info.cmake) diff --git a/vendor/cpp-httplib/CMakeLists.txt b/vendor/cpp-httplib/CMakeLists.txt index 8f0d15d1fd3..6951f073a0c 100644 --- a/vendor/cpp-httplib/CMakeLists.txt +++ b/vendor/cpp-httplib/CMakeLists.txt @@ -29,7 +29,11 @@ target_compile_definitions(${TARGET} PRIVATE set(OPENSSL_NO_ASM ON CACHE BOOL "Disable OpenSSL ASM code when building BoringSSL or LibreSSL") -if (LLAMA_BUILD_BORINGSSL) +if (NOT LLAMA_HTTPLIB_SSL) + message(STATUS "Building cpp-httplib without SSL support") + set(CPPHTTPLIB_OPENSSL_SUPPORT FALSE) + +elseif (LLAMA_BORINGSSL) set(FIPS OFF CACHE BOOL "Enable FIPS (BoringSSL)") set(BORINGSSL_GIT "https://boringssl.googlesource.com/boringssl" CACHE STRING "BoringSSL git repository") @@ -54,6 +58,8 @@ if (LLAMA_BUILD_BORINGSSL) set(BUILD_SHARED_LIBS OFF) set(BUILD_TESTING OFF) + message("NOTE: If this fails, set -DLLAMA_HTTPLIB_SSL=OFF to disable SSL support in cpp-httplib") + if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.28) FetchContent_MakeAvailable(boringssl) else() @@ -70,7 +76,7 @@ if (LLAMA_BUILD_BORINGSSL) set(CPPHTTPLIB_OPENSSL_SUPPORT TRUE) target_link_libraries(${TARGET} PUBLIC ssl crypto) -elseif (LLAMA_BUILD_LIBRESSL) +elseif (LLAMA_LIBRESSL) set(LIBRESSL_VERSION "4.2.1" CACHE STRING "LibreSSL version") message(STATUS "Fetching LibreSSL version ${LIBRESSL_VERSION}") @@ -95,6 +101,8 @@ elseif (LLAMA_BUILD_LIBRESSL) set(BUILD_SHARED_LIBS OFF) set(BUILD_TESTING OFF) + message("NOTE: If this fails,
set -DLLAMA_HTTPLIB_SSL=OFF to disable SSL support in cpp-httplib") + if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.28) FetchContent_MakeAvailable(libressl) else() @@ -139,6 +147,10 @@ elseif (LLAMA_OPENSSL) else() message(STATUS "OpenSSL not found, SSL support disabled") endif() + +else() + message(FATAL_ERROR "No SSL backend selected for cpp-httplib") + endif() if (CPPHTTPLIB_OPENSSL_SUPPORT)