Cache should raise KeyError when key is missing
parent 24fc38754b
commit 5be0efa5f8
1 changed file with 2 additions and 4 deletions
@@ -33,12 +33,10 @@ class LlamaCache:
                 return k
         return None

-    def __getitem__(
-        self, key: Sequence[llama_cpp.llama_token]
-    ) -> Optional["LlamaState"]:
+    def __getitem__(self, key: Sequence[llama_cpp.llama_token]) -> "LlamaState":
         _key = self._find_key(tuple(key))
         if _key is None:
-            return None
+            raise KeyError(f"Key not found: {key}")
         return self.cache_state[_key]

     def __contains__(self, key: Sequence[llama_cpp.llama_token]) -> bool:
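A minimal caller-side sketch of the new contract, assuming only what the diff above shows (a __getitem__ that now raises on a miss, and an unchanged __contains__); the helper names and the cache/tokens arguments below are hypothetical illustrations, not names introduced by this commit:

def get_state_or_none(cache, tokens):
    # Reproduces the old behaviour on top of the new contract:
    # __getitem__ now raises KeyError on a miss instead of returning None.
    try:
        return cache[tokens]
    except KeyError:
        return None

def get_state_checked(cache, tokens):
    # Membership test first, using the unchanged __contains__.
    if tokens in cache:
        return cache[tokens]
    return None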