Refactor Llama class internals

This commit is contained in:
Andrei Betlen 2023-11-06 09:16:36 -05:00
parent bbffdaebaa
commit e214a58422
2 changed files with 641 additions and 312 deletions

File diff suppressed because it is too large Load diff

View file

@@ -8,7 +8,7 @@ def test_llama_cpp_tokenization():
llama = llama_cpp.Llama(model_path=MODEL, vocab_only=True, verbose=False)
assert llama
-    assert llama.ctx is not None
+    assert llama._ctx.ctx is not None
text = b"Hello World"
@@ -37,7 +37,7 @@ def test_llama_patch(monkeypatch):
def test_llama_patch(monkeypatch):
llama = llama_cpp.Llama(model_path=MODEL, vocab_only=True)
-    n_vocab = llama_cpp.llama_n_vocab(llama.model)
+    n_vocab = llama_cpp.llama_n_vocab(llama._model.model)
## Set up mock function
def mock_eval(*args, **kwargs):