# CMake entry point for the llama-cpp-python package.
# Invoked by scikit-build-core to build the vendored llama.cpp shared
# libraries and install them next to the `llama_cpp` Python package.
cmake_minimum_required(VERSION 3.21)

project(llama_cpp)

# Allow users (e.g. pure-Python / prebuilt-wheel installs) to skip the
# native build entirely with -DLLAMA_BUILD=OFF.
option(LLAMA_BUILD "Build llama.cpp shared library and install alongside python package" ON)
if (LLAMA_BUILD)
    # The Python bindings load libllama via ctypes, so the native build
    # must produce shared libraries.
    set(BUILD_SHARED_LIBS ON)

    # Building llama
    if (APPLE AND NOT CMAKE_SYSTEM_PROCESSOR MATCHES "arm64")
        # Need to disable these llama.cpp flags on Apple x86_64,
        # otherwise users may encounter invalid instruction errors.
        # FORCE is intentional here: these must override any cached value so
        # wheels built on newer x86_64 Macs never emit AVX/AVX2/FMA/F16C
        # instructions, and they must be set BEFORE add_subdirectory() so the
        # vendored project sees them.
        set(LLAMA_AVX "Off" CACHE BOOL "llama: enable AVX" FORCE)
        set(LLAMA_AVX2 "Off" CACHE BOOL "llama: enable AVX2" FORCE)
        set(LLAMA_FMA "Off" CACHE BOOL "llama: enable FMA" FORCE)
        set(LLAMA_F16C "Off" CACHE BOOL "llama: enable F16C" FORCE)
    endif()

    add_subdirectory(vendor/llama.cpp)

    # Install the built library into the wheel's platlib, inside the
    # llama_cpp package directory (SKBUILD_PLATLIB_DIR is provided by
    # scikit-build-core).
    install(
        TARGETS llama
        LIBRARY DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        RUNTIME DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        ARCHIVE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        FRAMEWORK DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        RESOURCE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
    )
    # Temporary fix for https://github.com/scikit-build/scikit-build-core/issues/374
    # NOTE(review): this deliberately writes into the source tree (needed for
    # editable installs, per the issue above) — remove once fixed upstream.
    install(
        TARGETS llama
        LIBRARY DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        RUNTIME DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        ARCHIVE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        FRAMEWORK DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        RESOURCE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
    )
    # Workaround for Windows + CUDA https://github.com/abetlen/llama-cpp-python/issues/563
    # $<TARGET_RUNTIME_DLLS:llama> expands to the DLLs llama depends on at
    # runtime (e.g. CUDA runtime DLLs) so they ship inside the wheel too.
    install(
        FILES $<TARGET_RUNTIME_DLLS:llama>
        DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
    )
    install(
        FILES $<TARGET_RUNTIME_DLLS:llama>
        DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
    )

    # Building llava
    add_subdirectory(vendor/llama.cpp/examples/llava)
    # The upstream target is named llava_shared; rename its output so the
    # bindings can load it simply as "llava".
    set_target_properties(llava_shared PROPERTIES OUTPUT_NAME "llava")
    # Set CUDA_ARCHITECTURES to OFF on windows
    if (WIN32)
        set_target_properties(llava_shared PROPERTIES CUDA_ARCHITECTURES OFF)
    endif()
    install(
        TARGETS llava_shared
        LIBRARY DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        RUNTIME DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        ARCHIVE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        FRAMEWORK DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        RESOURCE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
    )
    # Temporary fix for https://github.com/scikit-build/scikit-build-core/issues/374
    # Same source-tree install workaround as for the llama target above.
    install(
        TARGETS llava_shared
        LIBRARY DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        RUNTIME DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        ARCHIVE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        FRAMEWORK DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        RESOURCE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
    )
endif()