From 230fc8b5352c2d64ff554ffa30a2704555532f08 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Tue, 21 Nov 2023 05:04:55 -0500
Subject: [PATCH] Bump version

---
 CHANGELOG.md          | 12 ++++++++++++
 llama_cpp/__init__.py |  2 +-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 41d47fd..2fbe1e1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.2.19]
+
+- Update llama.cpp to ggerganov/llama.cpp@0b871f1a04ef60e114bbe43004fd9c21114e802d
+- Fix #569: stop parameter in chat completion api should accept str by @abetlen in 128dc4731fa846ead7e684a137ca57d8931b8899
+- Document server host and port parameters by @jamesbraza in #768
+- Do not set grammar to None when initializing LlamaGrammar by @mthuurne in #834
+- Add mistrallite, intel, and openchat formats by @fakerybakery in #927
+- Add support for min_p parameter by @tk-master in #921
+- Fix #929: tokenizer adding leading space when generating from empty prompt by @abetlen in a34d48014192771d2e308a76c22f33bc0318d983
+- Fix low level api example by @zocainViken in #925
+- Fix missing package in openblas docker image by @ZisisTsatsas in #920
+
 ## [0.2.18]
 
 - Update llama.cpp to ggerganov/llama.cpp@6bb4908a17150b49373b5f977685b2e180a04f6f
diff --git a/llama_cpp/__init__.py b/llama_cpp/__init__.py
index 523280e..f429c3c 100644
--- a/llama_cpp/__init__.py
+++ b/llama_cpp/__init__.py
@@ -1,4 +1,4 @@
 from .llama_cpp import *
 from .llama import *
 
-__version__ = "0.2.18"
\ No newline at end of file
+__version__ = "0.2.19"
\ No newline at end of file
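
The changelog entries above mention two user-facing API changes in 0.2.19: the chat completion `stop` parameter now accepts a plain `str` (fix #569) and a `min_p` sampling parameter is exposed (#921). The sketch below is a minimal, illustrative usage example of those two parameters through the high-level `Llama` API; the model path and prompt are placeholders, and passing `min_p` directly to `create_chat_completion` is assumed rather than confirmed by this patch.

```python
from llama_cpp import Llama

# Illustrative model path; substitute any local GGUF model file.
llm = Llama(model_path="./models/mistral-7b-instruct.Q4_K_M.gguf")

response = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Name three prime numbers."}],
    stop="\n\n",   # a single string is accepted here as of 0.2.19 (fix #569)
    min_p=0.05,    # min_p sampling parameter added in #921
    max_tokens=64,
)
print(response["choices"][0]["message"]["content"])
```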