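# Builds the ext_server library: ext_server.cpp plus the vendored llama.cpp
# sources, compiled as a single target.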
set(TARGET ext_server)
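
# Verbose server logging is compiled in by default; configure with
# -DLLAMA_SERVER_VERBOSE=OFF to disable it at build time.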
option(LLAMA_SERVER_VERBOSE "Build verbose logging into the server" ON)
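
# Windows builds produce a shared library (so it can be loaded as a DLL);
# all other platforms link the server statically.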
if (WIN32)
    add_library(${TARGET} SHARED ext_server.cpp ../llama.cpp/llama.cpp)
else()
    add_library(${TARGET} STATIC ext_server.cpp ../llama.cpp/llama.cpp)
endif()
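
# Settings shared by both variants: llama.cpp requires C++11, and
# LLAMA_SERVER_LIBRARY=1 builds the server code as an embeddable library
# rather than a standalone executable.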
target_compile_features(${TARGET} PRIVATE cxx_std_11)
target_compile_definitions(${TARGET} PUBLIC LLAMA_SERVER_LIBRARY=1)
target_link_libraries(${TARGET} PRIVATE ggml llava common)
set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON)
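# Forward the verbosity option into the compiled sources as SERVER_VERBOSE=0/1.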
target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>)
install(TARGETS ext_server LIBRARY)
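
# When the CUDA toolkit is available, pick up its headers; on Windows, also
# link NVML (NVIDIA's GPU management library).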
if (CUDAToolkit_FOUND)
    target_include_directories(${TARGET} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
    if (WIN32)
        target_link_libraries(${TARGET} PRIVATE nvml)
    endif()
endif()
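
# A minimal, hypothetical configure/build invocation (assumes this directory
# is configured standalone; the surrounding project may drive it differently):
#   cmake -S . -B build -DLLAMA_SERVER_VERBOSE=OFF
#   cmake --build build --target ext_server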