llama.cpp/llama_cpp/llama_types.py

from typing import List, Optional, Dict, Literal
from typing_extensions import TypedDict


# Response types for the embedding endpoint.
class EmbeddingUsage(TypedDict):
    prompt_tokens: int
    total_tokens: int


class EmbeddingData(TypedDict):
    index: int
    object: str
    embedding: List[float]


class Embedding(TypedDict):
    object: Literal["list"]
    model: str
    data: List[EmbeddingData]
    usage: EmbeddingUsage


# Response types for the text completion endpoint.
class CompletionLogprobs(TypedDict):
    text_offset: List[int]
    token_logprobs: List[float]
    tokens: List[str]
    top_logprobs: List[Dict[str, float]]


class CompletionChoice(TypedDict):
    text: str
    index: int
    logprobs: Optional[CompletionLogprobs]
    finish_reason: Optional[str]


class CompletionUsage(TypedDict):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int


# Streamed completion chunk: same shape as Completion but without a usage field.
class CompletionChunk(TypedDict):
    id: str
    object: Literal["text_completion"]
    created: int
    model: str
    choices: List[CompletionChoice]


class Completion(TypedDict):
    id: str
    object: Literal["text_completion"]
    created: int
    model: str
    choices: List[CompletionChoice]
    usage: CompletionUsage
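

# Illustrative sketch (not part of the original module): how a Completion
# response could be constructed by hand from these TypedDicts. All field
# values below are assumptions made up for the example, not real output.
_example_completion: Completion = {
    "id": "cmpl-example",
    "object": "text_completion",
    "created": 1680000000,
    "model": "ggml-model.bin",
    "choices": [
        {
            "text": "Hello, world!",
            "index": 0,
            "logprobs": None,
            "finish_reason": "stop",
        }
    ],
    "usage": {
        "prompt_tokens": 5,
        "completion_tokens": 4,
        "total_tokens": 9,
    },
}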