"""Example FastAPI server for llama.cpp.
|
|
|
|
To run this example:
|
|
|
|
```bash
|
|
pip install fastapi uvicorn sse-starlette
|
|
export MODEL=../models/7B/...
|
|
```
|
|
|
|
Then run:
|
|
```
|
|
uvicorn llama_cpp.server.app:app --reload
|
|
```
|
|
|
|
or
|
|
|
|
```
|
|
python3 -m llama_cpp.server
|
|
```
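
Any field defined on Settings can also be passed as a command line flag, since the
__main__ block below generates one flag per field. For example, assuming Settings
defines a `model` field, this should work in place of the MODEL environment variable:

```
python3 -m llama_cpp.server --model ../models/7B/...
```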

Then visit http://localhost:8000/docs to see the interactive API docs.
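
Once the server is up, a quick way to check it is to send a completion request from
Python. The sketch below is only an illustration: the /v1/completions route and the
request fields are assumptions based on the OpenAI-style API the server exposes, so
adjust them to whatever the /docs page lists.

```python
import json
import urllib.request

# Hypothetical smoke test against a running server; the route and payload are
# assumptions, so adjust them to match what http://localhost:8000/docs reports.
request = urllib.request.Request(
    "http://localhost:8000/v1/completions",
    data=json.dumps({"prompt": "Q: Name the planets. A: ", "max_tokens": 64}).encode(),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(request) as response:
    print(json.load(response))
```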

"""
import os
import argparse

import uvicorn

from llama_cpp.server.app import create_app, Settings

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    # Expose every Settings field as a command line flag (pydantic v1 field API).
    for name, field in Settings.__fields__.items():
        parser.add_argument(
            f"--{name}",
            dest=name,
            type=field.type_,
            default=field.default,
            help=field.field_info.description,
        )

    args = parser.parse_args()
    # Only pass flags that were actually set, so Settings keeps its own defaults
    # (and values read from the environment, such as MODEL).
    settings = Settings(**{k: v for k, v in vars(args).items() if v is not None})
    app = create_app(settings=settings)

    uvicorn.run(
        app, host=os.getenv("HOST", "localhost"), port=int(os.getenv("PORT", 8000))
    )