refactor: Clean up variable names and separate concerns when downloading tokenizers

teleprint-me 2024-05-17 23:27:30 -04:00
parent 5c8144e645
commit 1a286c8e21


@@ -43,13 +43,13 @@ class HuggingFaceHub:
     def base_url(self) -> str:
         return self._base_url
 
-    def write_file(self, content: bytes, path: pathlib.Path) -> None:
-        with open(path, 'wb') as f:
+    def write_file(self, content: bytes, filepath: pathlib.Path) -> None:
+        with open(filepath, 'wb') as f:
             f.write(content)
-        self.logger.info(f"Wrote {len(content)} bytes to {path} successfully")
+        self.logger.info(f"Wrote {len(content)} bytes to {filepath} successfully")
 
-    def resolve_url(self, repo: str, file: str) -> str:
-        return f"{self._base_url}/{repo}/resolve/main/{file}"
+    def resolve_url(self, repo: str, filename: str) -> str:
+        return f"{self._base_url}/{repo}/resolve/main/{filename}"
 
     def download_file(self, url: str) -> requests.Response:
         response = self._session.get(url, headers=self.headers)
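The three renamed helpers above compose into a single fetch step: build the resolve URL, download, then write to disk. A minimal sketch of that flow, assuming a constructed HuggingFaceHub instance (its __init__, headers, and _session live outside this hunk) and an illustrative repo name:

import pathlib

hub = HuggingFaceHub()  # hypothetical construction; the real signature is not shown in this diff
url = hub.resolve_url("mistralai/Mistral-7B-v0.1", "tokenizer.json")  # illustrative repo and file
response = hub.download_file(url)  # presumably raises requests.exceptions.HTTPError on a bad status
hub.write_file(response.content, pathlib.Path("models/tokenizer.json"))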
@@ -98,23 +98,32 @@ class HFTokenizerRequest:
     def local_path(self, value: pathlib.Path):
         self._local_path = value
 
-    def download_model(self) -> None:
-        for model in self.models:
-            name, repo, tokt = model['name'], model['repo'], model['tokt']
-            os.makedirs(f"{self.local_path}/{name}", exist_ok=True)
-            filenames = ["config.json", "tokenizer_config.json", "tokenizer.json"]
-            if tokt == self.tokenizer_type.SPM:
-                filenames.append("tokenizer.model")
+    def resolve_filenames(self, tokt: TokenizerType) -> tuple[str]:
+        filenames = ["config.json", "tokenizer_config.json", "tokenizer.json"]
+        if tokt == self.tokenizer_type.SPM:
+            filenames.append("tokenizer.model")
+        return tuple(filenames)
 
-            for file_name in filenames:
-                file_path = pathlib.Path(f"{self.local_path}/{name}/{file_name}")
-                if file_path.is_file():
-                    self.logger.info(f"skipped pre-existing tokenizer {name} at {file_path}")
-                    continue
+    def resolve_tokenizer_model(
+        self,
+        filename: str,
+        filepath: pathlib.Path,
+        model: dict[str, object]
+    ) -> None:
+        try:  # NOTE: Do not use bare exceptions! They mask issues!
+            resolve_url = self.hub.resolve_url(model['repo'], filename)
+            response = self.hub.download_file(resolve_url)
+            self.hub.write_file(response.content, filepath)
+        except requests.exceptions.HTTPError as e:
+            self.logger.error(f"Failed to download tokenizer {model['name']}: {e}")
 
-                try:  # NOTE: Do not use bare exceptions! They mask issues!
-                    resolve_url = self.hub.resolve_url(repo, file_name)
-                    response = self.hub.download_file(resolve_url)
-                    self.hub.write_file(response.content, file_path)
-                except requests.exceptions.HTTPError as e:
-                    self.logger.error(f"Failed to download tokenizer {name}: {e}")
+    def download_model(self) -> None:
+        for model in self.models:
+            os.makedirs(f"{self.local_path}/{model['name']}", exist_ok=True)
+            filenames = self.resolve_filenames(model['tokt'])
+            for filename in filenames:
+                filepath = pathlib.Path(f"{self.local_path}/{model['name']}/{filename}")
+                if filepath.is_file():
+                    self.logger.info(f"skipped pre-existing tokenizer {model['name']} at {filepath}")
+                    continue
+                self.resolve_tokenizer_model(filename, filepath, model)
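The refactor keeps the per-model contract implicit: each entry in self.models must still provide the 'name', 'repo', and 'tokt' keys that resolve_filenames and resolve_tokenizer_model read. A hedged sketch of that shape and the new call path (the constructor and the TokenizerType enum are defined outside this diff):

models = [
    # hypothetical entry, inferred from the keys read in the hunk above
    {"name": "phi-3", "repo": "microsoft/Phi-3-mini-4k-instruct", "tokt": TokenizerType.SPM},
]

request = HFTokenizerRequest(...)  # real constructor arguments are not shown in this diff
request.download_model()  # creates local_path/<name>/, skips files already on disk, logs HTTP errors per file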