From 0ce66bc080fe537590b05b24bf442480bf2dd045 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Thu, 15 Feb 2024 16:09:48 -0500
Subject: [PATCH] fix: create_embedding broken response for input type str

---
 llama_cpp/llama.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index f3c7b4f..964b0c8 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -720,6 +720,8 @@ class Llama:
         assert self._model.model is not None
         model_name: str = model if model is not None else self.model_path
 
+        input = input if isinstance(input, list) else [input]
+
         # get numeric embeddings
         embeds: List[List[float]]
         total_tokens: int