separate prompt into template and system

parent 2d305fa99a
commit df146c41e2

3 changed files with 113 additions and 85 deletions

parser/parser.go (101 changes)
@@ -2,76 +2,91 @@ package parser
 import (
 	"bufio"
+	"bytes"
+	"errors"
 	"fmt"
 	"io"
-	"strings"
 )
 
 type Command struct {
 	Name string
-	Arg  string
+	Args string
+}
+
+func (c *Command) Reset() {
+	c.Name = ""
+	c.Args = ""
 }
 
 func Parse(reader io.Reader) ([]Command, error) {
 	var commands []Command
-	var foundModel bool
+	var command, modelCommand Command
 
 	scanner := bufio.NewScanner(reader)
-	multiline := false
-	var multilineCommand *Command
+	scanner.Split(scanModelfile)
 	for scanner.Scan() {
-		line := scanner.Text()
-		if multiline {
-			// If we're in a multiline string and the line is """, end the multiline string.
-			if strings.TrimSpace(line) == `"""` {
-				multiline = false
-				commands = append(commands, *multilineCommand)
-			} else {
-				// Otherwise, append the line to the multiline string.
-				multilineCommand.Arg += "\n" + line
-			}
-			continue
-		}
-		fields := strings.Fields(line)
+		line := scanner.Bytes()
+
+		fields := bytes.SplitN(line, []byte(" "), 2)
 		if len(fields) == 0 {
 			continue
 		}
 
-		command := Command{}
-		switch strings.ToUpper(fields[0]) {
+		switch string(bytes.ToUpper(fields[0])) {
 		case "FROM":
 			command.Name = "model"
-			command.Arg = fields[1]
-			if command.Arg == "" {
-				return nil, fmt.Errorf("no model specified in FROM line")
-			}
-			foundModel = true
-		case "PROMPT", "LICENSE":
-			command.Name = strings.ToLower(fields[0])
-			if fields[1] == `"""` {
-				multiline = true
-				multilineCommand = &command
-				multilineCommand.Arg = ""
-			} else {
-				command.Arg = strings.Join(fields[1:], " ")
-			}
+			command.Args = string(fields[1])
+			// copy command for validation
+			modelCommand = command
+		case "LICENSE", "TEMPLATE", "SYSTEM":
+			command.Name = string(bytes.ToLower(fields[0]))
+			command.Args = string(fields[1])
 		case "PARAMETER":
-			command.Name = fields[1]
-			command.Arg = strings.Join(fields[2:], " ")
+			fields = bytes.SplitN(fields[1], []byte(" "), 2)
+			command.Name = string(fields[0])
+			command.Args = string(fields[1])
 		default:
 			continue
 		}
-		if !multiline {
-			commands = append(commands, command)
-		}
+
+		commands = append(commands, command)
+		command.Reset()
 	}
 
-	if !foundModel {
+	if modelCommand.Args == "" {
 		return nil, fmt.Errorf("no FROM line for the model was specified")
 	}
 
-	if multiline {
-		return nil, fmt.Errorf("unclosed multiline string")
-	}
 	return commands, scanner.Err()
 }
+
+func scanModelfile(data []byte, atEOF bool) (advance int, token []byte, err error) {
+	if atEOF || len(data) == 0 {
+		return 0, nil, nil
+	}
+
+	newline := bytes.IndexByte(data, '\n')
+
+	if start := bytes.Index(data, []byte(`"""`)); start >= 0 && start < newline {
+		end := bytes.Index(data[start+3:], []byte(`"""`))
+		if end < 0 {
+			return 0, nil, errors.New(`unterminated multiline string: """`)
+		}
+
+		n := start + 3 + end + 3
+		return n, bytes.Replace(data[:n], []byte(`"""`), []byte(""), 2), nil
+	}
+
+	if start := bytes.Index(data, []byte(`'''`)); start >= 0 && start < newline {
+		end := bytes.Index(data[start+3:], []byte(`'''`))
+		if end < 0 {
+			return 0, nil, errors.New("unterminated multiline string: '''")
+		}
+
+		n := start + 3 + end + 3
+		return n, bytes.Replace(data[:n], []byte("'''"), []byte(""), 2), nil
+	}
+
+	return bufio.ScanLines(data, atEOF)
+}
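For context, a minimal sketch of how the reworked parser consumes a Modelfile that uses the new TEMPLATE and SYSTEM commands. The Modelfile contents and the model name below are invented for illustration; the import path matches the one used elsewhere in this commit. Triple-quoted blocks are collapsed into a single token by scanModelfile before Parse splits each token into a command name and its arguments.

package main

import (
	"fmt"
	"strings"

	"github.com/jmorganca/ollama/parser"
)

func main() {
	// A made-up Modelfile exercising the new TEMPLATE and SYSTEM commands.
	// The """ block is handled by scanModelfile, which strips the quotes.
	modelfile := `FROM llama2
TEMPLATE """
{{ .System }}
{{ .Prompt }}
"""
SYSTEM You are a helpful assistant.
PARAMETER temperature 1
`

	commands, err := parser.Parse(strings.NewReader(modelfile))
	if err != nil {
		panic(err)
	}

	for _, c := range commands {
		fmt.Printf("%-11s %q\n", c.Name, c.Args)
	}
}

Running this should print one command per recognized line, with names model, template, system, and temperature; blank lines and unknown keywords fall through to the default case and are skipped.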
@@ -16,6 +16,7 @@ import (
 	"reflect"
 	"strconv"
 	"strings"
+	"text/template"
 
 	"github.com/jmorganca/ollama/api"
 	"github.com/jmorganca/ollama/parser"
@@ -24,10 +25,33 @@ import (
 type Model struct {
 	Name      string `json:"name"`
 	ModelPath string
-	Prompt    string
+	Template  string
+	System    string
 	Options   api.Options
 }
 
+func (m *Model) Prompt(request api.GenerateRequest) (string, error) {
+	tmpl, err := template.New("").Parse(m.Template)
+	if err != nil {
+		return "", err
+	}
+
+	var vars struct {
+		System string
+		Prompt string
+	}
+
+	vars.System = m.System
+	vars.Prompt = request.Prompt
+
+	var sb strings.Builder
+	if err := tmpl.Execute(&sb, vars); err != nil {
+		return "", err
+	}
+
+	return sb.String(), nil
+}
+
 type ManifestV2 struct {
 	SchemaVersion int    `json:"schemaVersion"`
 	MediaType     string `json:"mediaType"`
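To make the new Model.Prompt behaviour concrete, here is a self-contained sketch of the same text/template rendering it performs. The template string, system message, and prompt below are invented for illustration; in the server they come from the model's template and system layers and from the incoming GenerateRequest.

package main

import (
	"fmt"
	"strings"
	"text/template"
)

func main() {
	// Made-up template text; the real value is stored in the model's template layer.
	const tmplText = "{{ .System }}\n\nUser: {{ .Prompt }}\nAssistant:"

	tmpl, err := template.New("").Parse(tmplText)
	if err != nil {
		panic(err)
	}

	// Mirrors the anonymous struct Model.Prompt passes to the template.
	var vars struct {
		System string
		Prompt string
	}
	vars.System = "You are a helpful assistant."
	vars.Prompt = "Why is the sky blue?"

	var sb strings.Builder
	if err := tmpl.Execute(&sb, vars); err != nil {
		panic(err)
	}

	fmt.Println(sb.String())
}

The rendered string is what the generate handler now hands to llm.Predict instead of the raw request prompt.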
@@ -71,20 +95,19 @@ func GetManifest(mp ModelPath) (*ManifestV2, error) {
 	if err != nil {
 		return nil, err
 	}
 
 	if _, err = os.Stat(fp); err != nil && !errors.Is(err, os.ErrNotExist) {
 		return nil, fmt.Errorf("couldn't find model '%s'", mp.GetShortTagname())
 	}
 
 	var manifest *ManifestV2
 
-	f, err := os.Open(fp)
+	bts, err := os.ReadFile(fp)
 	if err != nil {
 		return nil, fmt.Errorf("couldn't open file '%s'", fp)
 	}
 
-	decoder := json.NewDecoder(f)
-	err = decoder.Decode(&manifest)
-	if err != nil {
+	if err := json.Unmarshal(bts, &manifest); err != nil {
 		return nil, err
 	}
+
@@ -112,12 +135,20 @@ func GetModel(name string) (*Model, error) {
 		switch layer.MediaType {
 		case "application/vnd.ollama.image.model":
 			model.ModelPath = filename
-		case "application/vnd.ollama.image.prompt":
-			data, err := os.ReadFile(filename)
+		case "application/vnd.ollama.image.template":
+			bts, err := os.ReadFile(filename)
 			if err != nil {
 				return nil, err
 			}
-			model.Prompt = string(data)
+
+			model.Template = string(bts)
+		case "application/vnd.ollama.image.system":
+			bts, err := os.ReadFile(filename)
+			if err != nil {
+				return nil, err
+			}
+
+			model.System = string(bts)
 		case "application/vnd.ollama.image.params":
 			params, err := os.Open(filename)
 			if err != nil {
@@ -156,13 +187,13 @@ func CreateModel(name string, path string, fn func(status string)) error {
 	params := make(map[string]string)
 
 	for _, c := range commands {
-		log.Printf("[%s] - %s\n", c.Name, c.Arg)
+		log.Printf("[%s] - %s\n", c.Name, c.Args)
 		switch c.Name {
 		case "model":
 			fn("looking for model")
-			mf, err := GetManifest(ParseModelPath(c.Arg))
+			mf, err := GetManifest(ParseModelPath(c.Args))
 			if err != nil {
-				fp := c.Arg
+				fp := c.Args
 
 				// If filePath starts with ~/, replace it with the user's home directory.
 				if strings.HasPrefix(fp, "~/") {
@@ -183,7 +214,7 @@ func CreateModel(name string, path string, fn func(status string)) error {
 				fn("creating model layer")
 				file, err := os.Open(fp)
 				if err != nil {
-					fn(fmt.Sprintf("couldn't find model '%s'", c.Arg))
+					fn(fmt.Sprintf("couldn't find model '%s'", c.Args))
 					return fmt.Errorf("failed to open file: %v", err)
 				}
 				defer file.Close()
@@ -206,31 +237,21 @@ func CreateModel(name string, path string, fn func(status string)) error {
 					layers = append(layers, newLayer)
 				}
 			}
-		case "prompt":
-			fn("creating prompt layer")
+		case "license", "template", "system":
+			fn(fmt.Sprintf("creating %s layer", c.Name))
 			// remove the prompt layer if one exists
-			layers = removeLayerFromLayers(layers, "application/vnd.ollama.image.prompt")
+			mediaType := fmt.Sprintf("application/vnd.ollama.image.%s", c.Name)
+			layers = removeLayerFromLayers(layers, mediaType)
 
-			prompt := strings.NewReader(c.Arg)
-			l, err := CreateLayer(prompt)
+			layer, err := CreateLayer(strings.NewReader(c.Args))
 			if err != nil {
-				fn(fmt.Sprintf("couldn't create prompt layer: %v", err))
-				return fmt.Errorf("failed to create layer: %v", err)
+				return err
 			}
-			l.MediaType = "application/vnd.ollama.image.prompt"
-			layers = append(layers, l)
-		case "license":
-			fn("creating license layer")
-			license := strings.NewReader(c.Arg)
-			l, err := CreateLayer(license)
-			if err != nil {
-				fn(fmt.Sprintf("couldn't create license layer: %v", err))
-				return fmt.Errorf("failed to create layer: %v", err)
-			}
-			l.MediaType = "application/vnd.ollama.image.license"
-			layers = append(layers, l)
+
+			layer.MediaType = mediaType
+			layers = append(layers, layer)
 		default:
-			params[c.Name] = c.Arg
+			params[c.Name] = c.Args
 		}
 	}
 
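As a small aside, the collapsed license/template/system case derives each layer's media type from the command name rather than hard-coding one branch per command. The sketch below simply replays that fmt.Sprintf to show the resulting media types, which line up with the application/vnd.ollama.image.* values seen elsewhere in this diff.

package main

import "fmt"

func main() {
	// Replays the media-type derivation from CreateModel's collapsed case.
	for _, name := range []string{"license", "template", "system"} {
		fmt.Println(name, "->", fmt.Sprintf("application/vnd.ollama.image.%s", name))
	}
}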
@@ -9,7 +9,6 @@ import (
 	"os"
 	"path/filepath"
 	"strings"
-	"text/template"
 	"time"
 
 	"dario.cat/mergo"
@@ -54,19 +53,12 @@ func generate(c *gin.Context) {
 		return
 	}
 
-	templ, err := template.New("").Parse(model.Prompt)
+	prompt, err := model.Prompt(req)
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
 		return
 	}
 
-	var sb strings.Builder
-	if err = templ.Execute(&sb, req); err != nil {
-		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
-		return
-	}
-	req.Prompt = sb.String()
-
 	llm, err := llama.New(model.ModelPath, opts)
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
@@ -77,7 +69,7 @@ func generate(c *gin.Context) {
 	ch := make(chan any)
 	go func() {
 		defer close(ch)
-		llm.Predict(req.Context, req.Prompt, func(r api.GenerateResponse) {
+		llm.Predict(req.Context, prompt, func(r api.GenerateResponse) {
 			r.Model = req.Model
 			r.CreatedAt = time.Now().UTC()
 			if r.Done {