#!powershell
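
# Builds the Windows llama.cpp runner libraries for Ollama (normally invoked via `go generate`,
# per the completion message at the bottom). It must be run from an MSVC Developer Shell, which
# init_vars verifies, and produces CPU, CUDA, and ROCm variants depending on the installed tools.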
$ErrorActionPreference = "Stop"
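
# amdGPUs returns the semicolon-separated list of AMD GPU targets used for the ROCm build.
# Set AMDGPU_TARGETS in the environment to override the built-in list.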
function amdGPUs {
    if ($env:AMDGPU_TARGETS) {
        return $env:AMDGPU_TARGETS
    }
    # TODO - load from some common data file for linux + windows build consistency
    $GPU_LIST = @(
        "gfx900"
        "gfx906:xnack-"
        "gfx908:xnack-"
        "gfx90a:xnack+"
        "gfx90a:xnack-"
        "gfx940"
        "gfx941"
        "gfx942"
        "gfx1010"
        "gfx1012"
        "gfx1030"
        "gfx1100"
        "gfx1101"
        "gfx1102"
    )
    $GPU_LIST -join ';'
}
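
# init_vars establishes the shared build state: source paths, base cmake defines and targets,
# CUDA/gzip/dumpbin discovery, and code-signing configuration. It is re-run before each variant
# build so per-variant cmake defines start from a clean slate.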
function init_vars {
    # Verify the environment is a Developer Shell for MSVC 2019
    write-host $env:VSINSTALLDIR
    if ($null -eq $env:VSINSTALLDIR) {
        Write-Error "`r`nBUILD ERROR - YOUR DEVELOPMENT ENVIRONMENT IS NOT SET UP CORRECTLY`r`nTo build Ollama you must run from an MSVC Developer Shell`r`nSee .\docs\development.md for instructions to set up your dev environment"
        exit 1
    }

    $script:SRC_DIR = $(resolve-path "..\..\")
    $script:llamacppDir = "../llama.cpp"
    $script:cmakeDefs = @(
        "-DBUILD_SHARED_LIBS=on",
        "-DLLAMA_NATIVE=off"
    )
    $script:cmakeTargets = @("ext_server")
    $script:ARCH = "amd64" # arm not yet supported.

    if ($env:CGO_CFLAGS -contains "-g") {
        $script:cmakeDefs += @("-DCMAKE_VERBOSE_MAKEFILE=on", "-DLLAMA_SERVER_VERBOSE=on", "-DCMAKE_BUILD_TYPE=RelWithDebInfo")
        $script:config = "RelWithDebInfo"
    } else {
        $script:cmakeDefs += @("-DLLAMA_SERVER_VERBOSE=off", "-DCMAKE_BUILD_TYPE=Release")
        $script:config = "Release"
    }

    if ($null -ne $env:CMAKE_SYSTEM_VERSION) {
        $script:cmakeDefs += @("-DCMAKE_SYSTEM_VERSION=${env:CMAKE_SYSTEM_VERSION}")
    }
    # Try to find the CUDA dir
    if ($null -eq $env:CUDA_LIB_DIR) {
        $d = (get-command -ea 'silentlycontinue' nvcc).path
        if ($null -ne $d) {
            $script:CUDA_LIB_DIR = ($d | split-path -parent)
            $script:CUDA_INCLUDE_DIR = ($script:CUDA_LIB_DIR | split-path -parent) + "\include"
        }
    } else {
        $script:CUDA_LIB_DIR = $env:CUDA_LIB_DIR
    }

    # Optional helpers: gzip compresses the built DLLs, dumpbin reports their dependencies
    $script:GZIP = (get-command -ea 'silentlycontinue' gzip).path
    $script:DUMPBIN = (get-command -ea 'silentlycontinue' dumpbin).path

    # CUDA compute capabilities to build for; override with CMAKE_CUDA_ARCHITECTURES
    if ($null -eq $env:CMAKE_CUDA_ARCHITECTURES) {
        $script:CMAKE_CUDA_ARCHITECTURES = "50;52;61;70;75;80"
    } else {
        $script:CMAKE_CUDA_ARCHITECTURES = $env:CMAKE_CUDA_ARCHITECTURES
    }

    # Note: Windows Kits 10 signtool crashes with GCP's plugin
    if ($null -eq $env:SIGN_TOOL) {
        ${script:SignTool} = "C:\Program Files (x86)\Windows Kits\8.1\bin\x64\signtool.exe"
    } else {
        ${script:SignTool} = ${env:SIGN_TOOL}
    }

    if ("${env:KEY_CONTAINER}") {
        ${script:OLLAMA_CERT} = $(resolve-path "${script:SRC_DIR}\ollama_inc.crt")
    }
}
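
# git_module_setup initializes the llama.cpp submodule and force-updates it to the pinned commit.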
function git_module_setup {
    # TODO add flags to skip the init/patch logic to make it easier to mod llama.cpp code in-repo
    & git submodule init
    if ($LASTEXITCODE -ne 0) { exit($LASTEXITCODE) }
    & git submodule update --force "${script:llamacppDir}"
    if ($LASTEXITCODE -ne 0) { exit($LASTEXITCODE) }
}
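
# apply_patches appends the ollama ext_server subdirectory to llama.cpp's CMakeLists.txt (if not
# already present) and applies the temporary diffs under ../patches, restoring each touched file
# from git first so the patches apply cleanly.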
function apply_patches {
    # Wire up our CMakefile
    if (!(Select-String -Path "${script:llamacppDir}/CMakeLists.txt" -Pattern 'ollama')) {
        Add-Content -Path "${script:llamacppDir}/CMakeLists.txt" -Value 'add_subdirectory(../ext_server ext_server) # ollama'
    }

    # Apply temporary patches until fix is upstream
    $patches = Get-ChildItem "../patches/*.diff"
    foreach ($patch in $patches) {
        # Extract file paths from the patch file
        $filePaths = Get-Content $patch.FullName | Where-Object { $_ -match '^\+\+\+ ' } | ForEach-Object {
            $parts = $_ -split ' '
            ($parts[1] -split '/', 2)[1]
        }

        # Checkout each file
        Set-Location -Path ${script:llamacppDir}
        foreach ($file in $filePaths) {
            git checkout $file
        }
    }

    # Apply each patch
    foreach ($patch in $patches) {
        Set-Location -Path ${script:llamacppDir}
        git apply $patch.FullName
    }
}
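
# build configures and compiles the current $script:buildDir with cmake, exiting on the first
# failing step.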
function build {
    write-host "generating config with: cmake -S ${script:llamacppDir} -B $script:buildDir $script:cmakeDefs"
    & cmake --version
    & cmake -S "${script:llamacppDir}" -B $script:buildDir $script:cmakeDefs
    if ($LASTEXITCODE -ne 0) { exit($LASTEXITCODE) }
    write-host "building with: cmake --build $script:buildDir --config $script:config ($script:cmakeTargets | ForEach-Object { `"--target`", $_ })"
    & cmake --build $script:buildDir --config $script:config ($script:cmakeTargets | ForEach-Object { "--target", $_ })
    if ($LASTEXITCODE -ne 0) { exit($LASTEXITCODE) }
}
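
# install stages the freshly built ext_server.dll and llama.dll into ${script:buildDir}/lib and,
# when dumpbin is available, logs the DLL dependencies of ext_server.dll.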
function install {
    rm -ea 0 -recurse -force -path "${script:buildDir}/lib"
    md "${script:buildDir}/lib" -ea 0 > $null
    cp "${script:buildDir}/bin/${script:config}/ext_server.dll" "${script:buildDir}/lib"
    cp "${script:buildDir}/bin/${script:config}/llama.dll" "${script:buildDir}/lib"

    # Display the dll dependencies in the build log
    if ($null -ne $script:DUMPBIN) {
        & "$script:DUMPBIN" /dependents "${script:buildDir}/bin/${script:config}/ext_server.dll" | select-string ".dll"
    }
}
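
# sign code-signs every DLL under ${script:buildDir}/lib via signtool and the Google Cloud KMS
# provider; it is a no-op unless KEY_CONTAINER is set.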
function sign {
    if ("${env:KEY_CONTAINER}") {
        write-host "Signing ${script:buildDir}/lib/*.dll"
        foreach ($file in (get-childitem "${script:buildDir}/lib/*.dll")) {
            & "${script:SignTool}" sign /v /debug /fd sha256 /t http://timestamp.digicert.com /f "${script:OLLAMA_CERT}" `
                /csp "Google Cloud KMS Provider" /kc "${env:KEY_CONTAINER}" $file
            if ($LASTEXITCODE -ne 0) { exit($LASTEXITCODE) }
        }
    }
}
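
# compress_libs gzips every DLL under ${script:buildDir}/lib to shrink the build output; it is
# skipped when gzip is not on the PATH.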
function compress_libs {
    if ($null -eq $script:GZIP) {
        write-host "gzip not installed, not compressing files"
        return
    }
    write-host "Compressing dlls..."
    $libs = dir "${script:buildDir}/lib/*.dll"
    foreach ($file in $libs) {
        & "$script:GZIP" --best -f $file
    }
}
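
# cleanup reverts the patched files and the modified CMakeLists.txt inside the llama.cpp
# submodule, leaving the working tree clean.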
function cleanup {
    $patches = Get-ChildItem "../patches/*.diff"
    foreach ($patch in $patches) {
        # Extract file paths from the patch file
        $filePaths = Get-Content $patch.FullName | Where-Object { $_ -match '^\+\+\+ ' } | ForEach-Object {
            $parts = $_ -split ' '
            ($parts[1] -split '/', 2)[1]
        }

        # Checkout each file
        Set-Location -Path ${script:llamacppDir}
        foreach ($file in $filePaths) {
            git checkout $file
        }
    }

    Set-Location "${script:llamacppDir}/"
    git checkout CMakeLists.txt
}
init_vars
git_module_setup
apply_patches
# -DLLAMA_AVX -- 2011 Intel Sandy Bridge & AMD Bulldozer
# -DLLAMA_F16C -- 2012 Intel Ivy Bridge & AMD 2011 Bulldozer (No significant improvement over just AVX)
# -DLLAMA_AVX2 -- 2013 Intel Haswell & 2015 AMD Excavator / 2017 AMD Zen
# -DLLAMA_FMA (FMA3) -- 2013 Intel Haswell & 2012 AMD Piledriver
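
# Three CPU-only runner variants are built below: a lowest-common-denominator build with all
# vector extensions disabled, an AVX build, and an AVX2 build (which also enables FMA and F16C).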
$script:commonCpuDefs = @("-DCMAKE_POSITION_INDEPENDENT_CODE=on")

if ($null -eq ${env:OLLAMA_SKIP_CPU_GENERATE}) {

    init_vars
    $script:cmakeDefs = $script:commonCpuDefs + @("-A", "x64", "-DLLAMA_AVX=off", "-DLLAMA_AVX2=off", "-DLLAMA_AVX512=off", "-DLLAMA_FMA=off", "-DLLAMA_F16C=off") + $script:cmakeDefs
    $script:buildDir = "${script:llamacppDir}/build/windows/${script:ARCH}/cpu"
    write-host "Building LCD CPU"
    build
    install
    sign
    compress_libs

    init_vars
    $script:cmakeDefs = $script:commonCpuDefs + @("-A", "x64", "-DLLAMA_AVX=on", "-DLLAMA_AVX2=off", "-DLLAMA_AVX512=off", "-DLLAMA_FMA=off", "-DLLAMA_F16C=off") + $script:cmakeDefs
    $script:buildDir = "${script:llamacppDir}/build/windows/${script:ARCH}/cpu_avx"
    write-host "Building AVX CPU"
    build
    install
    sign
    compress_libs

    init_vars
    $script:cmakeDefs = $script:commonCpuDefs + @("-A", "x64", "-DLLAMA_AVX=on", "-DLLAMA_AVX2=on", "-DLLAMA_AVX512=off", "-DLLAMA_FMA=on", "-DLLAMA_F16C=on") + $script:cmakeDefs
    $script:buildDir = "${script:llamacppDir}/build/windows/${script:ARCH}/cpu_avx2"
    write-host "Building AVX2 CPU"
    build
    install
    sign
    compress_libs
} else {
    write-host "Skipping CPU generation step as requested"
}
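
# Build a CUDA runner when a CUDA toolkit (nvcc) was located, tagging the output directory with
# the detected toolkit version.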
if ($null -ne $script:CUDA_LIB_DIR) {
    # Then build cuda as a dynamically loaded library
    $nvcc = "$script:CUDA_LIB_DIR\nvcc.exe"
    $script:CUDA_VERSION = (get-item ($nvcc | split-path | split-path)).Basename
    if ($null -ne $script:CUDA_VERSION) {
        $script:CUDA_VARIANT = "_" + $script:CUDA_VERSION
    }
    init_vars
    $script:buildDir = "${script:llamacppDir}/build/windows/${script:ARCH}/cuda$script:CUDA_VARIANT"
    $script:cmakeDefs += @("-A", "x64", "-DLLAMA_CUBLAS=ON", "-DLLAMA_AVX=on", "-DLLAMA_AVX2=off", "-DCUDAToolkit_INCLUDE_DIR=$script:CUDA_INCLUDE_DIR", "-DCMAKE_CUDA_ARCHITECTURES=${script:CMAKE_CUDA_ARCHITECTURES}")
    write-host "Building CUDA"
    build
    install
    sign
    compress_libs
}
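
# Build a ROCm runner with clang and Ninja when the HIP SDK (HIP_PATH) is present, targeting the
# GPU list returned by amdGPUs.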
if ($null -ne $env:HIP_PATH) {
    $script:ROCM_VERSION = (get-item $env:HIP_PATH).Basename
    if ($null -ne $script:ROCM_VERSION) {
        $script:ROCM_VARIANT = "_v" + $script:ROCM_VERSION
    }

    init_vars
    $script:buildDir = "${script:llamacppDir}/build/windows/${script:ARCH}/rocm$script:ROCM_VARIANT"
    $script:cmakeDefs += @(
        "-G", "Ninja",
        "-DCMAKE_C_COMPILER=clang.exe",
        "-DCMAKE_CXX_COMPILER=clang++.exe",
        "-DLLAMA_HIPBLAS=on",
        "-DLLAMA_AVX=on",
        "-DLLAMA_AVX2=off",
        "-DCMAKE_POSITION_INDEPENDENT_CODE=on",
        "-DAMDGPU_TARGETS=$(amdGPUs)",
        "-DGPU_TARGETS=$(amdGPUs)"
    )

    # Make sure the ROCm binary dir is first in the path
    $env:PATH = "$env:HIP_PATH\bin;$env:VSINSTALLDIR\Common7\IDE\CommonExtensions\Microsoft\CMake\Ninja;$env:PATH"

    # We have to clobber the LIB var from the developer shell for clang to work properly
    $env:LIB = ""

    write-host "Building ROCm"
    build
    # Ninja doesn't prefix with config name
    ${script:config} = ""
    install
    if ($null -ne $script:DUMPBIN) {
        & "$script:DUMPBIN" /dependents "${script:buildDir}/bin/${script:config}/ext_server.dll" | select-string ".dll"
    }
    sign
    compress_libs
}
cleanup

write-host "`ngo generate completed. LLM runners: $(get-childitem -path ${script:SRC_DIR}\llm\llama.cpp\build\windows\${script:ARCH})"