from typing import Any, List, Mapping, Optional

from langchain.llms.base import LLM
from llama_cpp import Llama


class LlamaLLM(LLM):
    """Custom LangChain LLM that runs a local model through llama-cpp-python."""

    model_path: str
    llm: Llama

    @property
    def _llm_type(self) -> str:
        return "llama-cpp-python"

    def __init__(self, model_path: str, **kwargs: Any):
        # Load the local model once, then let the pydantic-based LLM
        # initializer populate the declared fields.
        llm = Llama(model_path=model_path)
        super().__init__(model_path=model_path, llm=llm, **kwargs)

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        # llama_cpp returns an OpenAI-style completion dict; extract the text.
        response = self.llm(prompt, stop=stop or [])
        return response["choices"][0]["text"]

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        return {"model_path": self.model_path}


llm = LlamaLLM(model_path="models/...")

print(llm("Question: What is the capital of France? Answer: ", stop=["Question:", "\n"]))