import json
import argparse
from llama_cpp import Llama
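
# Parse the path to the model file from the command line.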
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--model", type=str, default="./models/...")
args = parser.parse_args()
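
# Load the model from the given path.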
llm = Llama(model_path=args.model)
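
# Run a single completion: generate up to 48 new tokens, stop early at "Q:"
# or a newline, and include the prompt in the returned text (echo=True).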
output = llm(
    "Question: What are the names of the planets in the solar system? Answer: ",
    max_tokens=48,
    stop=["Q:", "\n"],
    echo=True,
)
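
# The call returns a completion dictionary; pretty-print it as JSON.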
print(json.dumps(output, indent=2))