From 906476f6ff07293ce53b08fd8ff6f7200aa7455c Mon Sep 17 00:00:00 2001
From: jaime-m-p <>
Date: Thu, 4 Jul 2024 22:52:09 +0200
Subject: [PATCH] style: spaces

---
 tests/test-tokenizer-random.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/test-tokenizer-random.py b/tests/test-tokenizer-random.py
index e0a4cec05..48cab8a1e 100644
--- a/tests/test-tokenizer-random.py
+++ b/tests/test-tokenizer-random.py
@@ -25,11 +25,11 @@ logger = logging.getLogger("test-tokenizer-random")
 class LibLlama:
 
     DEFAULT_PATH_LLAMA_H = "./include/llama.h"
-    DEFAULT_PATH_INCLUDES = [ "./ggml/include/", "./include/" ]
+    DEFAULT_PATH_INCLUDES = ["./ggml/include/", "./include/"]
     DEFAULT_PATH_LIBLLAMA = "./build/src/libllama.so"  # CMakeLists.txt: BUILD_SHARED_LIBS ON
 
     def __init__(self, path_llama_h: str = None, path_includes: list[str] = [], path_libllama: str = None):
-        path_llama_h = path_llama_h or self.DEFAULT_PATH_LLAMA_H
+        path_llama_h = path_llama_h or self.DEFAULT_PATH_LLAMA_H
         path_includes = path_includes or self.DEFAULT_PATH_INCLUDES
         path_libllama = path_libllama or self.DEFAULT_PATH_LIBLLAMA
         (self.ffi, self.lib) = self._load_libllama_cffi(path_llama_h, path_includes, path_libllama)