From 37556bf9c4f83f51e76682316ff4ea3aed58a279 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Fri, 22 Dec 2023 14:55:58 -0500
Subject: [PATCH] Bump version

---
 CHANGELOG.md          | 7 +++++++
 llama_cpp/__init__.py | 2 +-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 97404bf..13454a3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.2.25]
+
+- feat(server): Multi model support by @D4ve-R in #931
+- feat(server): Support none defaulting to infinity for completions by @swg in #111
+- feat(server): Implement openai api compatible authentication by @docmeth2 in #1010
+- fix: text_offset of multi-token characters by @twaka in #1037
+- fix: ctypes bindings for kv override by @phiharri in #1011
 - fix: ctypes definitions of llama_kv_cache_view_update and llama_kv_cache_view_free. by @e-c-d in #1028
 
 ## [0.2.24]
diff --git a/llama_cpp/__init__.py b/llama_cpp/__init__.py
index 00031ba..8521e77 100644
--- a/llama_cpp/__init__.py
+++ b/llama_cpp/__init__.py
@@ -1,4 +1,4 @@
 from .llama_cpp import *
 from .llama import *
 
-__version__ = "0.2.24"
\ No newline at end of file
+__version__ = "0.2.25"
\ No newline at end of file