fix: Enable CUDA backend for llava. Closes #1324
parent 73165021bb
commit 7f59856fa6
2 changed files with 3 additions and 2 deletions
@@ -51,8 +51,9 @@ if (LLAMA_BUILD)
 )
 
 if (LLAVA_BUILD)
-    if (LLAMA_CUBLAS)
+    if (LLAMA_CUBLAS OR LLAMA_CUDA)
         add_compile_definitions(GGML_USE_CUBLAS)
+        add_compile_definitions(GGML_USE_CUDA)
     endif()
 
     if (LLAMA_METAL)
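For readability, the llava block after this change reads roughly as follows (indentation approximate; llama.cpp renamed its CUDA build option from LLAMA_CUBLAS to LLAMA_CUDA and the matching compile definition from GGML_USE_CUBLAS to GGML_USE_CUDA, so both spellings are accepted here):

    if (LLAVA_BUILD)
        # define both CUDA macros so the llava sources select the CUDA backend
        if (LLAMA_CUBLAS OR LLAMA_CUDA)
            add_compile_definitions(GGML_USE_CUBLAS)
            add_compile_definitions(GGML_USE_CUDA)
        endif()
        # ... LLAMA_METAL handling follows unchanged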
Makefile (2 changes)
@@ -16,7 +16,7 @@ build.debug:
 	CMAKE_ARGS="-DCMAKE_BUILD_TYPE=Debug" python3 -m pip install --verbose --config-settings=cmake.verbose=true --config-settings=logging.level=INFO --config-settings=install.strip=false --editable .
 
 build.cuda:
-	CMAKE_ARGS="-DLLAMA_CUBLAS=on" python3 -m pip install --verbose -e .
+	CMAKE_ARGS="-DLLAMA_CUDA=on" python3 -m pip install --verbose -e .
 
 build.opencl:
 	CMAKE_ARGS="-DLLAMA_CLBLAST=on" python3 -m pip install --verbose -e .
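With the Makefile target updated to the renamed flag, a CUDA-enabled editable install can be done either through the target or by passing CMAKE_ARGS directly (commands taken from the Makefile above; run from the repository root):

    # equivalent to the updated build.cuda target
    CMAKE_ARGS="-DLLAMA_CUDA=on" python3 -m pip install --verbose -e .
    # or simply
    make build.cuda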