feature: allow any BLA_VENDOR value to be assigned via CMake arguments; align with whisper.cpp PR 927

zenix 2023-05-19 16:04:30 +09:00
parent 0926278434
commit ee72eafdb9
4 changed files with 26 additions and 47 deletions
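
In practice this means any vendor listed in CMake's FindBLAS documentation can be requested at configure time. A minimal usage sketch (the vendor names come from that list and from the diffs below; the matching BLAS library must already be installed):

```bash
mkdir build && cd build

# Pick exactly one of the following configure calls.

# OpenBLAS
cmake -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS ..

# Intel MKL (LP64 interface)
cmake -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp ..

# BLIS / libFLAME
cmake -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=FLAME ..

make -j
```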

BLIS.md

@@ -39,7 +39,7 @@ CMake:
```bash
mkdir build
cd build
cmake -DLLAMA_BLIS=ON ..
cmake -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=FLAME ..
make -j
```
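
Since the new `LLAMA_BLAS` branch also honours `LLAMA_STATIC` (it turns on `BLA_STATIC`, see the CMakeLists.txt hunk below), a statically linked BLIS build would presumably be configured as:

```bash
mkdir build
cd build
# LLAMA_STATIC is an existing option; with LLAMA_BLAS it also sets BLA_STATIC for FindBLAS
cmake -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=FLAME -DLLAMA_STATIC=ON ..
make -j
```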

CMakeLists.txt

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.12) # Don't bump this version for no reason
cmake_minimum_required(VERSION 3.25) # Don't bump this version for no reason
project("llama.cpp" C CXX)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
@@ -65,8 +65,8 @@ endif()
# 3rd party libs
option(LLAMA_ACCELERATE "llama: enable Accelerate framework" ON)
option(LLAMA_OPENBLAS "llama: use OpenBLAS" OFF)
option(LLAMA_BLIS "llama: use blis" OFF)
option(LLAMA_BLAS "llama: use BLAS" OFF)
set(LLAMA_BLAS_VENDOR "Generic" CACHE STRING "llama: BLA_VENDOR from https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors")
option(LLAMA_CUBLAS "llama: use cuBLAS" OFF)
option(LLAMA_CLBLAST "llama: use CLBlast" OFF)
@@ -146,57 +146,26 @@ if (APPLE AND LLAMA_ACCELERATE)
endif()
endif()
if (LLAMA_OPENBLAS)
if (LLAMA_BLAS)
if (LLAMA_STATIC)
set(BLA_STATIC ON)
endif()
set(BLA_VENDOR OpenBLAS)
set(BLA_SIZEOF_INTEGER 8)
set(BLA_VENDOR ${LLAMA_BLAS_VENDOR})
find_package(BLAS)
if (BLAS_FOUND)
message(STATUS "OpenBLAS found")
message(STATUS "BLAS found, Libraries: ${BLAS_LIBRARIES}")
add_compile_definitions(GGML_USE_OPENBLAS)
add_link_options(${BLAS_LIBRARIES})
set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} openblas)
set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${BLAS_LIBRARIES})
# find header file
set(OPENBLAS_INCLUDE_SEARCH_PATHS
/usr/include
/usr/include/openblas
/usr/include/openblas-base
/usr/local/include
/usr/local/include/openblas
/usr/local/include/openblas-base
/opt/OpenBLAS/include
$ENV{OpenBLAS_HOME}
$ENV{OpenBLAS_HOME}/include
)
find_path(OPENBLAS_INC NAMES cblas.h PATHS ${OPENBLAS_INCLUDE_SEARCH_PATHS})
add_compile_options(-I${OPENBLAS_INC})
message("${BLAS_LIBRARIES}")
include_directories(${BLAS_INCLUDE_DIRS})
else()
message(WARNING "OpenBLAS not found")
message(WARNING "BLAS not found, please refer to https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors to set correct LLAMA_BLAS_VENDOR")
endif()
endif()
if (LLAMA_BLIS)
add_compile_definitions(GGML_USE_BLIS)
# we don't directly call BLIS apis, use cblas wrapper instead
add_compile_definitions(GGML_USE_OPENBLAS)
set(BLIS_INCLUDE_SEARCH_PATHS
/usr/include
/usr/include/blis
/usr/local/include
/usr/local/include/blis
$ENV{BLIS_HOME}
$ENV{BLIS_HOME}/include
)
find_path(BLIS_INC NAMES blis.h PATHS ${BLIS_INCLUDE_SEARCH_PATHS})
add_compile_definitions(BLIS_ENABLE_CBLAS)
add_link_options(-lblis)
add_compile_options(-I${BLIS_INC})
endif()
if (LLAMA_CUBLAS)
cmake_minimum_required(VERSION 3.17)
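
The new branch simply forwards `LLAMA_BLAS_VENDOR` into `BLA_VENDOR` before calling `find_package(BLAS)` and reports the result through the `BLAS found, Libraries: ...` status message above. A quick way to confirm which library was resolved (a sketch, assuming a Unix shell):

```bash
mkdir build && cd build
# The STATUS line added in this hunk prints the libraries FindBLAS selected.
cmake -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS .. 2>&1 | grep "BLAS found"
```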

Makefile

@@ -123,7 +123,7 @@ ifdef LLAMA_OPENBLAS
endif
endif
ifdef LLAMA_BLIS
CFLAGS += -DGGML_USE_OPENBLAS -DGGML_USE_BLIS -I/usr/local/include/blis -I/usr/include/blis
CFLAGS += -DGGML_USE_OPENBLAS -I/usr/local/include/blis -I/usr/include/blis
LDFLAGS += -lblis -L/usr/local/lib
endif
ifdef LLAMA_CUBLAS
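
On the Makefile side, the `LLAMA_BLIS` switch only loses the now-unused `GGML_USE_BLIS` define; it is presumably still enabled the usual way:

```bash
# Build the Makefile BLIS path (assumes BLIS headers/libs under /usr/local, as in the flags above).
make clean
make LLAMA_BLIS=1 -j
```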

README.md

@@ -56,9 +56,8 @@ The main goal of `llama.cpp` is to run the LLaMA model using 4-bit integer quant
- Mixed F16 / F32 precision
- 4-bit, 5-bit and 8-bit integer quantization support
- Runs on the CPU
- OpenBLAS support
- BLAS support via OpenBLAS, Apple BLAS, ARM Performance Libraries, ATLAS, BLIS, Intel MKL, NVHPC, ACML, SCSL, SGIMATH and [more](https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors)
- cuBLAS and CLBlast support
- BLIS support (cblas wrapper)
The original implementation of `llama.cpp` was [hacked in an evening](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022).
Since then, the project has improved significantly thanks to many contributions. This project is for educational purposes and serves
@@ -275,7 +274,7 @@ Building the program with BLAS support may lead to some performance improvements
```bash
mkdir build
cd build
cmake .. -DLLAMA_OPENBLAS=ON
cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS
cmake --build . --config Release
```
@@ -283,6 +282,17 @@ Building the program with BLAS support may lead to some performance improvements
Check [BLIS.md](BLIS.md) for more information.
- Intel MKL
By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you have already sourced the Intel environment script and pass `-DLLAMA_BLAS=ON` to CMake, the MKL version of BLAS will be selected automatically. You may also specify it explicitly:
```bash
mkdir build
cd build
cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx
cmake --build . --config Release
```
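
For the automatic MKL selection described above, the Intel environment must be sourced in the same shell before configuring; a sketch, assuming a default oneAPI install location:

```bash
# Typical oneAPI location; adjust for your installation.
source /opt/intel/oneapi/setvars.sh
mkdir build && cd build
# With the default Generic vendor, FindBLAS can now pick up MKL from the environment.
cmake .. -DLLAMA_BLAS=ON
cmake --build . --config Release
```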
- cuBLAS
This provides BLAS acceleration using the CUDA cores of your Nvidia GPU. Make sure to have the CUDA toolkit installed. You can download it from your Linux distro's package manager or from here: [CUDA Toolkit](https://developer.nvidia.com/cuda-downloads).
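
The configure step for this back end is untouched by this commit; for reference, a sketch using the existing `LLAMA_CUBLAS` option:

```bash
mkdir build && cd build
# Requires the CUDA toolkit to be installed.
cmake .. -DLLAMA_CUBLAS=ON
cmake --build . --config Release
```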