diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py
index e08617ba2..de7ccd29b 100644
--- a/gguf-py/gguf/constants.py
+++ b/gguf-py/gguf/constants.py
@@ -167,6 +167,7 @@ class Keys:
         CHAT_TEMPLATE = "tokenizer.chat_template"
         CHAT_TEMPLATE_N = "tokenizer.chat_template.{name}"
         CHAT_TEMPLATES = "tokenizer.chat_templates"
+        INVERSE_TEMPLATE = "tokenizer.inverse_template"
         # FIM/Infill special tokens constants
         PREFIX_ID = "tokenizer.ggml.prefix_token_id"
         SUFFIX_ID = "tokenizer.ggml.suffix_token_id"
diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py
index 5c460ef1b..51f93cd02 100644
--- a/gguf-py/gguf/gguf_writer.py
+++ b/gguf-py/gguf/gguf_writer.py
@@ -843,6 +843,9 @@ class GGUFWriter:
         self.add_string(Keys.Tokenizer.CHAT_TEMPLATE, value)
 
+    def add_inverse_template(self, value: str) -> None:
+        self.add_string(Keys.Tokenizer.INVERSE_TEMPLATE, value)
+
     def add_prefix_token_id(self, id: int) -> None:
         self.add_uint32(Keys.Tokenizer.PREFIX_ID, id)
diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py
index f2645f921..6406048a5 100644
--- a/gguf-py/gguf/vocab.py
+++ b/gguf-py/gguf/vocab.py
@@ -21,6 +21,7 @@ class SpecialVocab:
     add_special_token: dict[str, bool]
     special_token_ids: dict[str, int]
     chat_template: str | Sequence[Mapping[str, str]] | None
+    inverse_template: str | None
 
     def __init__(
         self, path: str | os.PathLike[str], load_merges: bool = False,
@@ -33,6 +34,7 @@ class SpecialVocab:
         self.load_merges = load_merges
         self.merges = []
         self.chat_template = None
+        self.inverse_template = None
         if special_token_types is not None:
             self.special_token_types = special_token_types
         else:
@@ -71,6 +73,10 @@ class SpecialVocab:
             if not quiet:
                 logger.info(f'Setting chat_template to {self.chat_template}')
             gw.add_chat_template(self.chat_template)
+        if self.inverse_template is not None:
+            if not quiet:
+                logger.info(f'Setting inverse_template to {self.inverse_template}')
+            gw.add_inverse_template(self.inverse_template)
 
     def _load(self, path: Path) -> None:
         self._try_load_from_tokenizer_json(path)
@@ -159,6 +165,11 @@ class SpecialVocab:
                 self.chat_template = chat_template
             else:
                 logger.warning(f'Bad type for chat_template field in {tokenizer_config_file!r} - ignoring')
+            inverse_template = tokenizer_config.get('inverse_template')
+            if inverse_template is None or isinstance(inverse_template, str):
+                self.inverse_template = inverse_template
+            else:
+                logger.warning(f'Bad type for inverse_template field in {tokenizer_config_file!r} - ignoring')
         for typ in self.special_token_types:
             add_entry = tokenizer_config.get(f'add_{typ}_token')
             if isinstance(add_entry, bool):
diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml
index 33cfe26b7..10e94876c 100644
--- a/gguf-py/pyproject.toml
+++ b/gguf-py/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "gguf"
-version = "0.10.0"
+version = "0.11.0"
 description = "Read and write ML models in GGUF for GGML"
 authors = ["GGML <ggml@ggml.ai>"]
 packages = [
diff --git a/gguf-py/scripts/gguf_new_metadata.py b/gguf-py/scripts/gguf_new_metadata.py
index fce52a8c1..a004f0dd1 100755
--- a/gguf-py/scripts/gguf_new_metadata.py
+++ b/gguf-py/scripts/gguf_new_metadata.py
@@ -85,7 +85,7 @@ def copy_with_new_metadata(reader: gguf.GGUFReader, writer: gguf.GGUFWriter, new
             continue
 
         # Skip old chat templates if we have new ones
-        if field.name.startswith(gguf.Keys.Tokenizer.CHAT_TEMPLATE) and gguf.Keys.Tokenizer.CHAT_TEMPLATE in new_metadata:
+        if (field.name.startswith(gguf.Keys.Tokenizer.CHAT_TEMPLATE) and gguf.Keys.Tokenizer.CHAT_TEMPLATE in new_metadata) or (field.name.startswith(gguf.Keys.Tokenizer.INVERSE_TEMPLATE) and gguf.Keys.Tokenizer.INVERSE_TEMPLATE in new_metadata):
             logger.debug(f'Skipping {field.name}')
             continue
 
@@ -110,6 +110,11 @@ def copy_with_new_metadata(reader: gguf.GGUFReader, writer: gguf.GGUFWriter, new
         writer.add_chat_template(new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE].value)
         del new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE]
 
+    if gguf.Keys.Tokenizer.INVERSE_TEMPLATE in new_metadata:
+        logger.debug('Adding inverse template')
+        writer.add_inverse_template(new_metadata[gguf.Keys.Tokenizer.INVERSE_TEMPLATE].value)
+        del new_metadata[gguf.Keys.Tokenizer.INVERSE_TEMPLATE]
+
     for key, val in new_metadata.items():
         logger.debug(f'Adding {key}: "{val.value}" {val.description}')
         writer.add_key_value(key, val.value, val.type)
@@ -143,7 +148,8 @@ def main() -> None:
     parser.add_argument("--general-name", type=str, help="The models general.name", metavar='"name"')
     parser.add_argument("--general-description", type=str, help="The models general.description", metavar='"Description ..."')
     parser.add_argument("--chat-template", type=str, help="Chat template string (or JSON string containing templates)", metavar='"{% ... %} ..."')
-    parser.add_argument("--chat-template-config", type=Path, help="Config file containing chat template(s)", metavar='tokenizer_config.json')
+    parser.add_argument("--inverse-template", type=str, help="Inverse template string", metavar='"{% ... %} ..."')
+    parser.add_argument("--chat-template-config", type=Path, help="Config file containing chat and/or inverse template(s)", metavar='tokenizer_config.json')
     parser.add_argument("--pre-tokenizer", type=str, help="The models tokenizer.ggml.pre", metavar='"pre tokenizer"')
    parser.add_argument("--remove-metadata", action="append", type=str, help="Remove metadata (by key name) from output model", metavar='general.url')
     parser.add_argument("--special-token", action="append", type=str, help="Special token by value", nargs=2, metavar=(' | '.join(token_names.keys()), '""'))
@@ -166,12 +172,18 @@ def main() -> None:
     if args.chat_template:
         new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, json.loads(args.chat_template) if args.chat_template.startswith('[') else args.chat_template)
 
+    if args.inverse_template:
+        new_metadata[gguf.Keys.Tokenizer.INVERSE_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, args.inverse_template)
+
     if args.chat_template_config:
         with open(args.chat_template_config, 'r') as fp:
             config = json.load(fp)
-            template = config.get('chat_template')
-            if template:
-                new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, template)
+            chat_template = config.get('chat_template')
+            inverse_template = config.get('inverse_template')
+            if chat_template:
+                new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, chat_template)
+            if inverse_template:
+                new_metadata[gguf.Keys.Tokenizer.INVERSE_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, inverse_template)
 
     if args.pre_tokenizer:
         new_metadata[gguf.Keys.Tokenizer.PRE] = MetadataDetails(gguf.GGUFValueType.STRING, args.pre_tokenizer)
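Usage sketch (not part of the patch): with this change, a conversion script can store the inverse template either implicitly, by letting SpecialVocab pick up an `inverse_template` field from tokenizer_config.json and forward it via `gw.add_inverse_template(...)` in `add_to_gguf`, or explicitly through the new GGUFWriter method. The snippet below is a minimal, hypothetical example of the explicit path; the output filename, "llama" architecture string, and template strings are placeholders only.

    from gguf import GGUFWriter

    # Placeholder Jinja-style strings; real templates come from the model's tokenizer_config.json.
    chat_template    = "{% for message in messages %}...{% endfor %}"
    inverse_template = "{# renders formatted text back into a message list #}"

    writer = GGUFWriter("out.gguf", "llama")       # placeholder path and arch
    writer.add_chat_template(chat_template)        # existing key: tokenizer.chat_template
    writer.add_inverse_template(inverse_template)  # new key: tokenizer.inverse_template

    writer.write_header_to_file()
    writer.write_kv_data_to_file()
    writer.write_tensors_to_file()
    writer.close()

For an existing GGUF file, the same key can be set after the fact with the extended script, e.g. `python gguf-py/scripts/gguf_new_metadata.py input.gguf output.gguf --inverse-template "{% ... %}"`, or via `--chat-template-config tokenizer_config.json` when the config file carries an `inverse_template` entry.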