Add support to get embeddings from high-level API. Closes #4
This commit is contained in:
parent
9ba5c3c3b7
commit
70b8a1ef75
2 changed files with 26 additions and 0 deletions
12
examples/high_level_api_embedding.py
Normal file
12
examples/high_level_api_embedding.py
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
"""Example: compute an embedding for a string using the high-level Llama API.

Run with:  python high_level_api_embedding.py -m ./models/your-model.bin
"""
import argparse

from llama_cpp import Llama

parser = argparse.ArgumentParser()
# Placeholder default — the user is expected to pass the real model path via -m.
parser.add_argument("-m", "--model", type=str, default="./models/...")
args = parser.parse_args()

# embedding=True is required so the llama context computes and exposes embeddings.
llm = Llama(model_path=args.model, embedding=True)

print(llm.embed("Hello world!"))
|
|
@ -105,6 +105,20 @@ class Llama:
|
||||||
output += llama_cpp.llama_token_to_str(self.ctx, token)
|
output += llama_cpp.llama_token_to_str(self.ctx, token)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
def embed(self, text: str):
|
||||||
|
"""Embed a string.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
text: The utf-8 encoded string to embed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A list of embeddings.
|
||||||
|
"""
|
||||||
|
tokens = self.tokenize(text.encode("utf-8"))
|
||||||
|
self._eval(tokens, 0)
|
||||||
|
embeddings = llama_cpp.llama_get_embeddings(self.ctx)
|
||||||
|
return embeddings[:llama_cpp.llama_n_embd(self.ctx)]
|
||||||
|
|
||||||
def _eval(self, tokens: List[int], n_past):
|
def _eval(self, tokens: List[int], n_past):
|
||||||
rc = llama_cpp.llama_eval(
|
rc = llama_cpp.llama_eval(
|
||||||
self.ctx,
|
self.ctx,
|
||||||
|
|
Loading…
Reference in a new issue