From fcf15df1ef6ea26aba4833bd350716726240443c Mon Sep 17 00:00:00 2001
From: Bruce MacDonald
Date: Mon, 26 Jun 2023 12:30:35 -0400
Subject: [PATCH] add cli

---
 README.md |  2 +-
 proto.py  | 20 ++++++++++++++++++--
 2 files changed, 19 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 8cdc1fe3..dd8f34e3 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ pip install -r requirements.txt
 Put your model in `models/` and run:
 
 ```
-python proto.py
+python3 proto.py serve
 ```
 
 To run the app:
diff --git a/proto.py b/proto.py
index be99550e..0292c201 100644
--- a/proto.py
+++ b/proto.py
@@ -1,6 +1,7 @@
 import json
 import os
 import threading
+import click
 from llama_cpp import Llama
 from flask import Flask, Response, stream_with_context, request
 from flask_cors import CORS
@@ -100,6 +101,21 @@ def models_route_handler():
     return Response(json.dumps(bin_files), mimetype="application/json")
 
 
+@click.group(invoke_without_command=True)
+@click.pass_context
+def cli(ctx):
+    # allows the script to respond to command line input when executed directly
+    if ctx.invoked_subcommand is None:
+        click.echo(ctx.get_help())
+
+
+@cli.command()
+@click.option("--port", default=5000, help="Port to run the server on")
+@click.option("--debug", default=False, help="Enable debug mode")
+def serve(port, debug):
+    print(f"Serving on http://localhost:{port}")
+    app.run(host="0.0.0.0", port=port, debug=debug)
+
+
 if __name__ == "__main__":
-    app.run(debug=True, threaded=True, port=5001)
-    app.run()
+    cli()
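
Usage sketch (not part of the patch itself, assuming it is applied as-is): the click group makes proto.py dispatch on a subcommand, so the server would be started with something like:

    python3 proto.py                                    # no subcommand: prints the click help text
    python3 proto.py serve                              # start the Flask server on the default port 5000
    python3 proto.py serve --port 8080 --debug true     # --port/--debug are forwarded to app.run()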