Simplify .gitignore for tests, clang-tidy fixes

This commit is contained in:
crasm 2023-12-17 22:33:38 -05:00
parent fe6a6fb6d1
commit 6bba3410fa
3 changed files with 7 additions and 21 deletions

16
.gitignore vendored
View file

@@ -86,19 +86,3 @@ examples/jeopardy/results.txt
poetry.lock poetry.lock
poetry.toml poetry.toml
# Test binaries
/tests/test-grammar-parser
/tests/test-llama-grammar
/tests/test-double-float
/tests/test-grad0
/tests/test-opt
/tests/test-quantize-fns
/tests/test-quantize-perf
/tests/test-sampling
/tests/test-tokenizer-0-llama
/tests/test-tokenizer-0-falcon
/tests/test-tokenizer-1-llama
/tests/test-tokenizer-1-bpe
/tests/test-rope
/tests/test-backend-ops

2
tests/.gitignore vendored Normal file
View file

@@ -0,0 +1,2 @@
*
!*.*

View file

@@ -4,17 +4,17 @@
#include <cstdlib> #include <cstdlib>
int main(void) { int main(void) {
auto model_path = "models/7B/ggml-model-f16.gguf"; const auto * model_path = "models/7B/ggml-model-f16.gguf";
auto file = fopen(model_path, "r"); auto * file = fopen(model_path, "r");
if (file == nullptr) { if (file == nullptr) {
fprintf(stderr, "no model at '%s' found\n", model_path); fprintf(stderr, "no model at '%s' found\n", model_path);
return EXIT_FAILURE; return EXIT_FAILURE;
} else {
fprintf(stderr, "using '%s'\n", model_path);
fclose(file);
} }
fprintf(stderr, "using '%s'\n", model_path);
fclose(file);
llama_backend_init(false); llama_backend_init(false);
auto params = llama_model_params{}; auto params = llama_model_params{};
params.use_mmap = false; params.use_mmap = false;