rfc: dynamic environ lookup

Michael Yang 2024-07-03 16:00:54 -07:00
parent 5784c05397
commit 35b89b2eab
7 changed files with 29 additions and 26 deletions
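
In short: the package-level envconfig.Debug flag, previously populated once by LoadConfig(), becomes an envconfig.Debug() function that re-reads OLLAMA_DEBUG on every call. Call sites swap the variable for the function, and tests set the value with t.Setenv instead of mutating package state.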

=== file 1 of 7 ===

@@ -14,7 +14,7 @@ import (
 
 func InitLogging() {
     level := slog.LevelInfo
 
-    if envconfig.Debug {
+    if envconfig.Debug() {
        level = slog.LevelDebug
    }

=== file 2 of 7 ===

@@ -26,11 +26,24 @@ func (o OllamaHost) String() string {
 
 var ErrInvalidHostPort = errors.New("invalid port specified in OLLAMA_HOST")
 
+// Debug returns true if the OLLAMA_DEBUG environment variable is set to a truthy value.
+func Debug() bool {
+    if s := clean("OLLAMA_DEBUG"); s != "" {
+        b, err := strconv.ParseBool(s)
+        if err != nil {
+            // non-empty value is truthy
+            return true
+        }
+
+        return b
+    }
+
+    return false
+}
+
 var (
     // Set via OLLAMA_ORIGINS in the environment
     AllowOrigins []string
-    // Set via OLLAMA_DEBUG in the environment
-    Debug bool
     // Experimental flash attention
     FlashAttention bool
     // Set via OLLAMA_HOST in the environment
@@ -80,7 +93,7 @@ type EnvVar struct {
 func AsMap() map[string]EnvVar {
     ret := map[string]EnvVar{
-        "OLLAMA_DEBUG": {"OLLAMA_DEBUG", Debug, "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
+        "OLLAMA_DEBUG": {"OLLAMA_DEBUG", Debug(), "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
        "OLLAMA_FLASH_ATTENTION": {"OLLAMA_FLASH_ATTENTION", FlashAttention, "Enabled flash attention"},
        "OLLAMA_HOST": {"OLLAMA_HOST", Host, "IP Address for the ollama server (default 127.0.0.1:11434)"},
        "OLLAMA_KEEP_ALIVE": {"OLLAMA_KEEP_ALIVE", KeepAlive, "The duration that models stay loaded in memory (default \"5m\")"},
@@ -137,15 +150,6 @@ func init() {
 }
 
 func LoadConfig() {
-    if debug := clean("OLLAMA_DEBUG"); debug != "" {
-        d, err := strconv.ParseBool(debug)
-        if err == nil {
-            Debug = d
-        } else {
-            Debug = true
-        }
-    }
-
     if fa := clean("OLLAMA_FLASH_ATTENTION"); fa != "" {
        d, err := strconv.ParseBool(fa)
        if err == nil {
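
For illustration, here is a minimal standalone sketch of the semantics the new accessor implements. It uses os.Getenv in place of the package's unexported clean helper (an assumption; the diff does not show what clean does): values that strconv.ParseBool understands are honored, and any other non-empty value counts as true.

    package main

    import (
        "fmt"
        "os"
        "strconv"
    )

    // debug mirrors the new envconfig.Debug; os.Getenv stands in for
    // the package's clean helper, which may also normalize the value.
    func debug() bool {
        if s := os.Getenv("OLLAMA_DEBUG"); s != "" {
            b, err := strconv.ParseBool(s)
            if err != nil {
                // unparsable but non-empty, e.g. OLLAMA_DEBUG=yes
                return true
            }
            return b
        }
        return false
    }

    func main() {
        for _, v := range []string{"", "0", "false", "1", "true", "yes"} {
            os.Setenv("OLLAMA_DEBUG", v)
            fmt.Printf("OLLAMA_DEBUG=%q -> %v\n", v, debug())
        }
    }

Running the sketch prints false for "", "0", and "false", and true for "1", "true", and "yes".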

=== file 3 of 7 ===

@@ -12,16 +12,15 @@ import (
 )
 
 func TestConfig(t *testing.T) {
-    Debug = false // Reset whatever was loaded in init()
     t.Setenv("OLLAMA_DEBUG", "")
-    LoadConfig()
-    require.False(t, Debug)
+    require.False(t, Debug())
+
     t.Setenv("OLLAMA_DEBUG", "false")
-    LoadConfig()
-    require.False(t, Debug)
+    require.False(t, Debug())
+
     t.Setenv("OLLAMA_DEBUG", "1")
-    LoadConfig()
-    require.True(t, Debug)
+    require.True(t, Debug())
+
     t.Setenv("OLLAMA_FLASH_ATTENTION", "1")
     LoadConfig()
     require.True(t, FlashAttention)
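
Note that the LoadConfig() calls and the Debug = false reset disappear from the OLLAMA_DEBUG cases: because Debug() reads the environment on each call, t.Setenv takes effect immediately and there is no package state to reset. The OLLAMA_FLASH_ATTENTION case still goes through LoadConfig(), since FlashAttention remains a package variable.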

=== file 4 of 7 ===

@@ -611,7 +611,7 @@ func LoadOneapiMgmt(oneapiLibPaths []string) (int, *C.oneapi_handle_t, string) {
 }
 
 func getVerboseState() C.uint16_t {
-    if envconfig.Debug {
+    if envconfig.Debug() {
        return C.uint16_t(1)
    }
    return C.uint16_t(0)

=== file 5 of 7 ===

@@ -8,14 +8,14 @@ import (
     "testing"
 
     "github.com/ollama/ollama/api"
-    "github.com/ollama/ollama/envconfig"
     "github.com/ollama/ollama/gpu"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 )
 
 func TestEstimateGPULayers(t *testing.T) {
-    envconfig.Debug = true
+    t.Setenv("OLLAMA_DEBUG", "1")
+
     modelName := "dummy"
     f, err := os.CreateTemp(t.TempDir(), modelName)
     require.NoError(t, err)
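
The same pattern applies here: t.Setenv replaces the direct write to envconfig.Debug, which also makes the envconfig import unnecessary and lets the testing package restore the variable's previous value automatically when the test finishes.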

=== file 6 of 7 ===

@@ -195,7 +195,7 @@ func NewLlamaServer(gpus gpu.GpuInfoList, model string, ggml *GGML, adapters, pr
         params = append(params, "--n-gpu-layers", fmt.Sprintf("%d", opts.NumGPU))
     }
 
-    if envconfig.Debug {
+    if envconfig.Debug() {
        params = append(params, "--verbose")
    }
@@ -381,7 +381,7 @@ func NewLlamaServer(gpus gpu.GpuInfoList, model string, ggml *GGML, adapters, pr
     }
 
     slog.Info("starting llama server", "cmd", s.cmd.String())
-    if envconfig.Debug {
+    if envconfig.Debug() {
        filteredEnv := []string{}
        for _, ev := range s.cmd.Env {
            if strings.HasPrefix(ev, "CUDA_") ||

=== file 7 of 7 ===

@@ -1093,7 +1093,7 @@ func (s *Server) GenerateRoutes() http.Handler {
 
 func Serve(ln net.Listener) error {
     level := slog.LevelInfo
 
-    if envconfig.Debug {
+    if envconfig.Debug() {
        level = slog.LevelDebug
    }
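
One consequence worth noting for this RFC: call sites such as InitLogging and Serve still evaluate Debug() once to pick a log level, so flipping OLLAMA_DEBUG at runtime only affects paths that call Debug() again, for example the --verbose flag and the environment dump that run each time NewLlamaServer starts a runner.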