py : fix new type errors from master branch
parent 0caf60a79e
commit 6f215f1f0d
2 changed files with 5 additions and 5 deletions
@@ -3182,7 +3182,7 @@ class ChatGLMModel(Model):
     def set_vocab_chatglm3(self):
         dir_model = self.dir_model
         hparams = self.hparams
-        tokens: list[bytearray] = []
+        tokens: list[bytes] = []
         toktypes: list[int] = []
         scores: list[float] = []

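Note on the list[bytes] change: the vocab pieces collected here are stored as bytes, so the old list[bytearray] annotation made each append a static type error even though nothing changes at runtime. A minimal standalone sketch of the mismatch (hypothetical values, not the converter code):

    # str.encode() returns bytes; appending bytes to a list annotated as
    # list[bytearray] is flagged by mypy/pyright, while list[bytes] is not.
    tokens_old: list[bytearray] = []
    tokens_old.append("token".encode("utf-8"))  # static error: bytes is not bytearray

    tokens_new: list[bytes] = []
    tokens_new.append("token".encode("utf-8"))  # accepted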
@@ -3331,7 +3331,7 @@ class ChatGLMModel(Model):
         special_vocab.add_to_gguf(self.gguf_writer)

     def set_gguf_parameters(self):
-        self.gguf_writer.add_name(self.hparams.get("_name_or_path").split("/")[1]) # THUDM/glm4-9b-chat or THUDM/chatglm3-6b
+        self.gguf_writer.add_name(self.hparams["_name_or_path"].split("/")[1]) # THUDM/glm4-9b-chat or THUDM/chatglm3-6b
         n_embed = self.hparams.get("hidden_size", self.hparams.get("n_embed"))
         n_head = self.hparams.get("n_head", self.hparams.get("num_attention_heads"))
         n_head_kv = self.hparams.get("multi_query_group_num", n_head)
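Note on the .get() -> [...] change: dict.get() is typed as possibly returning None, so chaining .split() onto its result is a static error; plain indexing gives the checker a non-optional value (and raises KeyError at runtime if the key is missing, where the old code would have hit AttributeError on None). A minimal standalone sketch (hypothetical dict, not the converter code):

    hparams: dict[str, str] = {"_name_or_path": "THUDM/glm4-9b-chat"}

    # hparams.get("_name_or_path") is typed "str | None", so calling .split()
    # on it is flagged by the type checker; indexing is typed plain "str".
    name = hparams["_name_or_path"].split("/")[1]
    print(name)  # glm4-9b-chat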
@@ -63,9 +63,9 @@ def gguf_hash(reader: GGUFReader, filename: str, disable_progress_bar) -> None:
         bar.update(sum_weights_in_tensor)

         sha1_layer = hashlib.sha1()
-        sha1_layer.update(tensor.data)
-        sha1.update(tensor.data)
-        uuidv5_sha1.update(tensor.data)
+        sha1_layer.update(tensor.data.data)
+        sha1.update(tensor.data.data)
+        uuidv5_sha1.update(tensor.data.data)
         print("sha1 {0} {1}:{2}".format(sha1_layer.hexdigest(), filename, tensor.name)) # noqa: NP100

     # Flush Hash Progress Bar
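Note on the tensor.data.data change: the reader tensor's data field is a NumPy array, and ndarray.data is a memoryview over the same buffer, which is presumably what satisfies the bytes-like parameter in hashlib's type stubs; the bytes being hashed do not change. A minimal standalone sketch (plain NumPy, not the gguf reader):

    import hashlib

    import numpy as np

    arr = np.arange(8, dtype=np.float32)

    # arr.data is a memoryview over the array's buffer; hashing it yields the
    # same digest as hashing the array's raw bytes.
    h = hashlib.sha1()
    h.update(arr.data)
    assert h.hexdigest() == hashlib.sha1(arr.tobytes()).hexdigest()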