Add basic langchain demo

This commit is contained in:
Andrei Betlen 2023-03-23 16:25:24 -04:00
parent eec9256a42
commit 90c78723de

View file

@ -0,0 +1,29 @@
from llama_cpp import Llama
from langchain.llms.base import LLM
from typing import Optional, List, Mapping, Any
class LlamaLLM(LLM):
    """LangChain-compatible LLM wrapper around a llama-cpp-python model."""

    # Filesystem path to the llama.cpp model file.
    model_path: str
    # Underlying llama_cpp.Llama instance used for inference.
    llm: Llama

    @property
    def _llm_type(self) -> str:
        """Backend identifier reported to LangChain."""
        return "llama-cpp-python"

    def __init__(self, model_path: str, **kwargs: Any):
        """Load the model at ``model_path`` and initialise the base LLM.

        Args:
            model_path: Path to the llama.cpp model file on disk.
            **kwargs: Extra keyword arguments forwarded to the ``LLM`` base class.
        """
        llm = Llama(model_path=model_path)
        # Fields must be set through the (pydantic) base-class constructor;
        # plain attribute assignment before super().__init__ has no effect.
        # (Removed the original dead local self-assignment of model_path.)
        super().__init__(model_path=model_path, llm=llm, **kwargs)

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Generate a completion for ``prompt``.

        Args:
            prompt: Text to complete.
            stop: Optional stop sequences; ``None`` is treated as no stops.

        Returns:
            The generated completion text (first choice only).
        """
        response = self.llm(prompt, stop=stop or [])
        return response["choices"][0]["text"]

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Parameters identifying this model instance to LangChain (e.g. for caching)."""
        return {"model_path": self.model_path}
# Demo: point model_path at a real llama.cpp model file before running.
llm = LlamaLLM(model_path="models/...")

question = "Question: What is the capital of France? Answer: "
stops = ["Question:", "\n"]
answer = llm(question, stop=stops)
print(answer)