examples: add basic python example
parent a8dc0c9b5f
commit 993cb9fad6
2 changed files with 47 additions and 0 deletions
15 examples/python/README.md Normal file

@@ -0,0 +1,15 @@
# Python

This is a simple example of calling the Ollama API from a Python app.

First, download a model:

```
curl -L https://huggingface.co/TheBloke/orca_mini_3B-GGML/resolve/main/orca-mini-3b.ggmlv3.q4_1.bin -o orca.bin
```

Then run it using the example script. You'll need to have Ollama running on your machine.

```
python3 main.py orca.bin
```
32 examples/python/main.py Normal file

@@ -0,0 +1,32 @@
import http.client
import json
import os
import sys

if len(sys.argv) < 2:
    print("Usage: python main.py <model file>")
    sys.exit(1)

conn = http.client.HTTPConnection('localhost', 11434)

headers = { 'Content-Type': 'application/json' }

# generate text from the model
conn.request("POST", "/api/generate", json.dumps({
    'model': os.path.join(os.getcwd(), sys.argv[1]),
    'prompt': 'write me a short story',
    'stream': True
}), headers)

response = conn.getresponse()

def parse_generate(data):
    for event in data.decode('utf-8').split("\n"):
        if not event:
            continue
        yield event

if response.status == 200:
    for chunk in response:
        for event in parse_generate(chunk):
            print(json.loads(event)['response'], end="", flush=True)
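For comparison only, and not part of this commit: a minimal sketch of the same streaming `/api/generate` call written with the third-party `requests` library instead of `http.client`. It assumes an Ollama server listening on localhost:11434, as in `main.py` above; the file name `stream_requests.py` is hypothetical.

```
# Minimal sketch (assumption: requests is installed): same streaming call as main.py.
import json
import os
import sys

import requests

if len(sys.argv) < 2:
    print("Usage: python stream_requests.py <model file>")
    sys.exit(1)

resp = requests.post(
    "http://localhost:11434/api/generate",
    json={
        # mirror main.py: pass the model file as an absolute path
        "model": os.path.join(os.getcwd(), sys.argv[1]),
        "prompt": "write me a short story",
        "stream": True,
    },
    stream=True,  # read the response incrementally instead of buffering it all
)
resp.raise_for_status()

# The response arrives as one JSON object per line; iter_lines() reassembles
# lines that were split across network chunks before we parse them.
for line in resp.iter_lines():
    if line:
        print(json.loads(line)["response"], end="", flush=True)
```

Using `iter_lines()` also sidesteps the edge case in the `http.client` version where a single JSON line could be split across two chunks.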