Get llama.com building as an aarch64 native binary

Justine Tunney 2023-05-09 12:14:57 -07:00
parent d04430f4ef
commit 4c093155a3
GPG key ID: BE714B4575D6E328
40 changed files with 7842 additions and 11683 deletions


@@ -38,6 +38,7 @@
 #include "libc/sysv/consts/clock.h"
 #include "libc/thread/thread.h"
 #include "libc/time/time.h"
+#include "third_party/aarch64/arm_neon.h"
 #include "third_party/intel/immintrin.internal.h"
 #include "third_party/libcxx/math.h"


@@ -27,7 +27,8 @@ THIRD_PARTY_GGML_A_DIRECTDEPS = \
 LIBC_STR \
 LIBC_STUBS \
 LIBC_SYSV \
-LIBC_TINYMATH
+LIBC_TINYMATH \
+THIRD_PARTY_COMPILER_RT
 THIRD_PARTY_GGML_A_DEPS := \
 $(call uniq,$(foreach x,$(THIRD_PARTY_GGML_A_DIRECTDEPS),$($(x))))
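THIRD_PARTY_COMPILER_RT is cosmopolitan's copy of the compiler-rt builtins library, which the compiler calls into when a C operation has no direct instruction on the target; presumably ggml starts needing such a builtin once it is compiled natively for aarch64. Which builtin that is isn't visible from this hunk, so the C sketch below only illustrates the general mechanism; div128 and half_to_float are hypothetical names.

/* 128-bit division has no single aarch64 (or x86-64) instruction, so the
 * compiler emits a call to the compiler-rt builtin __udivti3. */
unsigned __int128 div128(unsigned __int128 a, unsigned __int128 b) {
  return a / b;
}

/* On compilers that support _Float16, half-precision conversions may
 * likewise lower to __extendhfsf2 / __truncsfhf2 when the target has no
 * native fp16 support. */
float half_to_float(_Float16 h) {
  return (float)h;
}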


@@ -99,14 +99,15 @@ int main(int argc, char ** argv) {
     params.model = "models/llama-7B/ggml-model.bin";
+#ifdef __x86_64__
     if (!X86_HAVE(AVX2)) return on_missing_feature("avx2");
     if (!X86_HAVE(AVX)) return on_missing_feature("avx");
     if (!X86_HAVE(FMA)) return on_missing_feature("fma");
     if (!X86_HAVE(SSE3)) return on_missing_feature("sse3");
     if (!X86_HAVE(F16C)) {
         fprintf(stderr, "%s: warning: cpuid f16c not detected; inference might crash\n", __func__);
     }
+#endif /* __x86_64__ */
     if (gpt_params_parse(argc, argv, params) == false) {
         return 1;
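With the #ifdef in place, the CPUID checks only run on x86-64; on aarch64 the binary simply assumes the hardware it was compiled for. The sketch below, which is not part of this commit, shows what an analogous runtime check could look like on an aarch64 Linux host using getauxval(AT_HWCAP); check_aarch64_features is a hypothetical helper, and on_missing_feature is the same helper already called in the hunk above.

#if defined(__aarch64__) && defined(__linux__)
#include <stdio.h>
#include <sys/auxv.h>

int on_missing_feature(const char *feature);  /* helper from the hunk above */

static int check_aarch64_features(void) {
  unsigned long hwcap = getauxval(AT_HWCAP);
  if (!(hwcap & (1ul << 1))) {   /* HWCAP_ASIMD: Advanced SIMD (NEON) */
    return on_missing_feature("asimd");
  }
  if (!(hwcap & (1ul << 9))) {   /* HWCAP_FPHP: fp16 arithmetic */
    fprintf(stderr, "warning: fp16 arithmetic (FPHP) not detected\n");
  }
  return 0;
}
#endif /* __aarch64__ && __linux__ */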