refactor: Apply consistent naming conventions
parent 9ba6b92c2d
commit 0ccf579242
1 changed file with 19 additions and 11 deletions
@@ -868,6 +868,14 @@ class LlamaFileType(IntEnum):
     GUESSED = 1024 # not specified in the model file
 
 
+LLAMA_FILE_TYPE_NAMES: dict[LlamaFileType, str] = {
+    LlamaFileType.ALL_F32 : "F32",
+    LlamaFileType.MOSTLY_F16 : "F16",
+    LlamaFileType.MOSTLY_BF16 : "BF16",
+    LlamaFileType.MOSTLY_Q8_0 : "Q8_0",
+}
+
+
 class GGUFEndian(IntEnum):
     LITTLE = 0
     BIG = 1
@@ -943,24 +951,24 @@ GGML_QUANT_SIZES: dict[GGMLQuantizationType, tuple[int, int]] = {
 #
 # Tokenizer Types
 #
-class VOCAB_TYPE(IntEnum):
+class VocabType(IntEnum):
     NON = auto() # For models without vocab
     SPM = auto() # SentencePiece LLaMa tokenizer
     BPE = auto() # BytePair GPT-2 tokenizer
     WPM = auto() # WordPiece BERT tokenizer
 
 
-VOCAB_TYPE_NAMES: dict[VOCAB_TYPE, str] = {
-    VOCAB_TYPE.SPM: "SPM",
-    VOCAB_TYPE.BPE: "BPE",
-    VOCAB_TYPE.WPM: "WPM",
+VOCAB_TYPE_NAMES: dict[VocabType, str] = {
+    VocabType.SPM: "SPM",
+    VocabType.BPE: "BPE",
+    VocabType.WPM: "WPM",
 }
 
 
 #
 # Model File Types
 #
-class MODEL_FILE_TYPE(IntEnum):
+class ModelFileType(IntEnum):
     UNK = auto() # Unsupported file type
     SFT = auto() # SafeTensor file type
     PTH = auto() # PyTorch file type
@@ -968,11 +976,11 @@ class MODEL_FILE_TYPE(IntEnum):
     PT = auto() # PyTorch file type
 
 
-MODEL_FILE_TYPE_NAMES: dict[MODEL_FILE_TYPE, str] = {
-    MODEL_FILE_TYPE.PT: "pt",
-    MODEL_FILE_TYPE.PTH: "pth",
-    MODEL_FILE_TYPE.BIN: "bin",
-    MODEL_FILE_TYPE.SFT: "safetensors",
+MODEL_FILE_TYPE_NAMES: dict[ModelFileType, str] = {
+    ModelFileType.PT: "pt",
+    ModelFileType.PTH: "pth",
+    ModelFileType.BIN: "bin",
+    ModelFileType.SFT: "safetensors",
 }
 
 #
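
For readers skimming the diff, the pattern this rename settles on is a PascalCase IntEnum paired with an UPPER_SNAKE_CASE table mapping members to their display strings. Below is a minimal, self-contained sketch of how such a pair can be used; the describe_vocab helper is hypothetical and not part of this commit.

from enum import IntEnum, auto


class VocabType(IntEnum):
    NON = auto()  # For models without vocab
    SPM = auto()  # SentencePiece LLaMa tokenizer
    BPE = auto()  # BytePair GPT-2 tokenizer
    WPM = auto()  # WordPiece BERT tokenizer


VOCAB_TYPE_NAMES: dict[VocabType, str] = {
    VocabType.SPM: "SPM",
    VocabType.BPE: "BPE",
    VocabType.WPM: "WPM",
}


def describe_vocab(vocab_type: VocabType) -> str:
    # Hypothetical helper: look up the display string, falling back to the
    # enum member's own name for types without an entry (e.g. NON).
    return VOCAB_TYPE_NAMES.get(vocab_type, vocab_type.name)


print(describe_vocab(VocabType.BPE))  # BPE
print(describe_vocab(VocabType.NON))  # NON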