ggml : temporarily disable llamafile sgemm until fixed

ggml-ci
This commit is contained in:
Georgi Gerganov 2024-04-16 22:41:03 +03:00
parent 8cc91dc63c
commit f02ea667c1
No known key found for this signature in database
GPG key ID: 449E073F9DC10735
3 changed files with 12 additions and 5 deletions

View file

@@ -88,6 +88,7 @@ endif()
# 3rd party libs
option(LLAMA_ACCELERATE "llama: enable Accelerate framework" ON)
option(LLAMA_BLAS "llama: use BLAS" OFF)
option(LLAMA_LLAMAFILE "llama: use llamafile SGEMM" ON)
set(LLAMA_BLAS_VENDOR "Generic" CACHE STRING "llama: BLAS library vendor")
option(LLAMA_CUDA "llama: use CUDA" OFF)
option(LLAMA_CUBLAS "llama: use CUDA (deprecated, use LLAMA_CUDA)" OFF)
@@ -286,6 +287,7 @@ if (LLAMA_METAL)
${METALKIT_FRAMEWORK}
)
endif()
if (LLAMA_BLAS)
if (LLAMA_STATIC)
set(BLA_STATIC ON)
@@ -368,6 +370,11 @@ if (LLAMA_BLAS)
endif()
endif()
if (LLAMA_LLAMAFILE)
# TODO: temporary disabled until test-backend-ops becomes green
#add_compile_definitions(GGML_USE_LLAMAFILE)
endif()
if (LLAMA_QKK_64)
add_compile_definitions(GGML_QKK_64)
endif()