# Ollama specific CMakefile to include in llama.cpp/examples/server
#
# Builds the `ext_server` library that embeds the llama.cpp server so
# Ollama can drive it in-process instead of spawning a subprocess.

set(TARGET ext_server)

# When ON, compiles in the server's verbose logging path (SERVER_VERBOSE=1).
option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON)

if (WIN32)
    # Windows: build as a DLL so symbols are exported for the Go side to load.
    add_library(${TARGET} SHARED ../../../ext_server/ext_server.cpp ../../llama.cpp)
else()
    # Unix-likes: static archive, linked into the final shared extension later.
    add_library(${TARGET} STATIC ../../../ext_server/ext_server.cpp ../../llama.cpp)
endif()

# Headers from llama.cpp's common/, the examples/server dir above us, and the
# llama.cpp root (this file lives under examples/server/).
target_include_directories(${TARGET} PRIVATE ../../common)
target_include_directories(${TARGET} PRIVATE ../..)
target_include_directories(${TARGET} PRIVATE ../../..)

target_compile_features(${TARGET} PRIVATE cxx_std_11)

# PUBLIC: consumers compiling against ext_server headers also need this define.
target_compile_definitions(${TARGET} PUBLIC LLAMA_SERVER_LIBRARY=1)

target_link_libraries(${TARGET} PRIVATE ggml llava common)

# Required since the static build is ultimately linked into a shared library.
set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON)

# $<BOOL:...> normalizes the option to 1/0 at generate time.
target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>)

# Use ${TARGET} (not a literal name) so a rename stays in one place.
install(TARGETS ${TARGET} LIBRARY)

if (CUDAToolkit_FOUND)
    target_include_directories(${TARGET} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
    if (WIN32)
        # NVML is needed on Windows for GPU management queries.
        target_link_libraries(${TARGET} PRIVATE nvml)
    endif()
endif()