llama.cpp/CMakeLists.txt

cmake_minimum_required(VERSION 3.4...3.22)
project(llama_cpp)
option(LLAMA_BUILD "Build llama.cpp shared library and install alongside python package" ON)
if (LLAMA_BUILD)
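    # Build llama.cpp as a shared library so the compiled libllama can be
    # loaded at runtime by the Python bindings.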
    set(BUILD_SHARED_LIBS "On")
    if (APPLE AND NOT CMAKE_SYSTEM_PROCESSOR MATCHES "arm64")
        # Need to disable these llama.cpp flags on Apple x86_64,
        # otherwise users may encounter invalid instruction errors
        set(LLAMA_AVX "Off" CACHE BOOL "llama: enable AVX" FORCE)
        set(LLAMA_AVX2 "Off" CACHE BOOL "llama: enable AVX2" FORCE)
        set(LLAMA_FMA "Off" CACHE BOOL "llama: enable FMA" FORCE)
        set(LLAMA_F16C "Off" CACHE BOOL "llama: enable F16C" FORCE)
    endif()
    add_subdirectory(vendor/llama.cpp)
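    # SKBUILD_PLATLIB_DIR is provided by scikit-build-core and points at the
    # platlib directory of the wheel being built, so the shared library ends up
    # bundled inside the installed llama_cpp package.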
    install(
        TARGETS llama
        LIBRARY DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        RUNTIME DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        ARCHIVE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        FRAMEWORK DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
        RESOURCE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
    )
    # Temporary fix for https://github.com/scikit-build/scikit-build-core/issues/374
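    # (installs a second copy of the shared library into the source tree's
    # llama_cpp/ directory, presumably so the bindings can also find it when the
    # package is not copied into site-packages, e.g. for editable installs)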
    install(
        TARGETS llama
        LIBRARY DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        RUNTIME DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        ARCHIVE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        FRAMEWORK DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
        RESOURCE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
    )
endif()