Merge pull request #1684 from dhiltgen/tag_integration_tests
Guard integration tests with a tag
Commit cf50ef5b51
4 changed files with 15 additions and 5 deletions
@@ -2,6 +2,9 @@
 # This script sets up integration tests which run the full stack to verify
 # inference locally
+#
+# To run the relevant tests use
+#   go test -tags=integration ./server
 set -e
 set -o pipefail

@@ -21,15 +24,15 @@ for model in ${TEST_MODELS[@]}; do
     echo "Pulling manifest for ${TEST_MODEL}:${TEST_MODEL_TAG}"
     curl -s --header "${ACCEPT_HEADER}" \
         -o ${OLLAMA_MODELS}/manifests/${REGISTRY}/${TEST_MODEL}/${TEST_MODEL_TAG} \
-        ${REGISTRY_SCHEME}://${REGISTRY}/v2/${TEST_MODEL}/manifests/${TEST_MODEL_TAG}
+        ${REGISTRY_SCHEME}://${REGISTRY}/v2/${TEST_MODEL}/manifests/${TEST_MODEL_TAG}

     CFG_HASH=$(cat ${OLLAMA_MODELS}/manifests/${REGISTRY}/${TEST_MODEL}/${TEST_MODEL_TAG} | jq -r ".config.digest")
     echo "Pulling config blob ${CFG_HASH}"
     curl -L -C - --header "${ACCEPT_HEADER}" \
-        -o ${OLLAMA_MODELS}/blobs/${CFG_HASH} \
-        ${REGISTRY_SCHEME}://${REGISTRY}/v2/${TEST_MODEL}/blobs/${CFG_HASH}
+        -o ${OLLAMA_MODELS}/blobs/${CFG_HASH} \
+        ${REGISTRY_SCHEME}://${REGISTRY}/v2/${TEST_MODEL}/blobs/${CFG_HASH}

-    for LAYER in $(cat ${OLLAMA_MODELS}/manifests/${REGISTRY}/${TEST_MODEL}/${TEST_MODEL_TAG} | jq -r ".layers[].digest" ) ; do
+    for LAYER in $(cat ${OLLAMA_MODELS}/manifests/${REGISTRY}/${TEST_MODEL}/${TEST_MODEL_TAG} | jq -r ".layers[].digest"); do
         echo "Pulling blob ${LAYER}"
         curl -L -C - --header "${ACCEPT_HEADER}" \
             -o ${OLLAMA_MODELS}/blobs/${LAYER} \

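For orientation, the loop above follows the usual registry pull sequence: fetch the manifest for the tag, read .config.digest from it, download the config blob, then download each .layers[].digest blob. Below is a rough Go sketch of that same flow, added here only for illustration; the registry host, model name, tag, Accept header value, and destination directory are made-up placeholders standing in for the script's REGISTRY, TEST_MODEL, TEST_MODEL_TAG, ACCEPT_HEADER, and OLLAMA_MODELS variables, not values taken from this PR.

// Minimal sketch of the manifest-then-blobs pull flow performed by the
// setup script above. All constants are assumptions for illustration.
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"
)

const (
	registry = "registry.ollama.ai"  // assumed registry host (REGISTRY)
	model    = "library/orca-mini"   // hypothetical test model (TEST_MODEL)
	tag      = "latest"              // hypothetical tag (TEST_MODEL_TAG)
	destDir  = "/tmp/ollama-test"    // stand-in for OLLAMA_MODELS
	// assumed manifest media type; the script reads this from ACCEPT_HEADER
	accept = "application/vnd.docker.distribution.manifest.v2+json"
)

// manifest mirrors the two fields the script extracts with jq:
// .config.digest and .layers[].digest.
type manifest struct {
	Config struct {
		Digest string `json:"digest"`
	} `json:"config"`
	Layers []struct {
		Digest string `json:"digest"`
	} `json:"layers"`
}

// fetch GETs url with the Accept header and writes the response body to path.
func fetch(url, path string) error {
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Accept", accept)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("GET %s: %s", url, resp.Status)
	}
	if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
		return err
	}
	out, err := os.Create(path)
	if err != nil {
		return err
	}
	defer out.Close()
	_, err = io.Copy(out, resp.Body)
	return err
}

func main() {
	// Step 1: pull the manifest for the tag.
	manifestPath := filepath.Join(destDir, "manifests", registry, model, tag)
	manifestURL := fmt.Sprintf("https://%s/v2/%s/manifests/%s", registry, model, tag)
	if err := fetch(manifestURL, manifestPath); err != nil {
		panic(err)
	}

	// Step 2: read the config and layer digests out of the manifest.
	data, err := os.ReadFile(manifestPath)
	if err != nil {
		panic(err)
	}
	var m manifest
	if err := json.Unmarshal(data, &m); err != nil {
		panic(err)
	}
	digests := []string{m.Config.Digest}
	for _, l := range m.Layers {
		digests = append(digests, l.Digest)
	}

	// Step 3: pull every blob, using the digest verbatim as the file name,
	// just as the script does.
	for _, d := range digests {
		blobURL := fmt.Sprintf("https://%s/v2/%s/blobs/%s", registry, model, d)
		if err := fetch(blobURL, filepath.Join(destDir, "blobs", d)); err != nil {
			panic(err)
		}
	}
}

The sketch skips the resume behaviour the script gets from curl -C - and does no digest verification; it is only meant to make the manifest-to-blobs relationship explicit.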
@@ -1,3 +1,5 @@
+//go:build integration
+
 package server
 
 import (

@@ -1,3 +1,5 @@
+//go:build integration
+
 package server
 
 import (

@@ -1,3 +1,5 @@
+//go:build integration
+
 package server
 
 import (

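The three hunks above add the same //go:build constraint to each test file. As a minimal illustration of what that guard does (the file body and test name below are hypothetical, not part of this PR), a file carrying the constraint is excluded from a plain go test ./server run and only compiles when the integration tag is supplied, which is what the setup script's "go test -tags=integration ./server" comment refers to.

//go:build integration

package server

import "testing"

// TestIntegrationExample is a hypothetical placeholder. Because of the
// //go:build integration constraint above, `go test ./server` never compiles
// this file; `go test -tags=integration ./server` does, and runs it.
func TestIntegrationExample(t *testing.T) {
	t.Log("runs only when built with -tags=integration")
}

Running go test ./server therefore exercises only the untagged unit tests, while CI or a developer can opt in to the heavier full-stack tests with the extra flag.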
@@ -38,7 +40,7 @@ func PrepareModelForPrompts(t *testing.T, modelName string, opts api.Options) (*
 }
 
 func OneShotPromptResponse(t *testing.T, ctx context.Context, req api.GenerateRequest, model *Model, runner llm.LLM) string {
-	prompt, err := model.Prompt(PromptVars{
+	prompt, err := model.PreResponsePrompt(PromptVars{
 		System: req.System,
 		Prompt: req.Prompt,
 		First:  len(req.Context) == 0,

@@ -54,6 +56,7 @@ func OneShotPromptResponse(t *testing.T, ctx context.Context, req api.GenerateRe
 			success <- true
 		}
 	}
 
 	predictReq := llm.PredictOpts{
 		Prompt: prompt,
 		Format: req.Format,
