ggml : add predefined list of CPU backend variants to build (#10626)
* ggml : add predefined list of CPU backend variants to build
* update CPU dockerfiles
parent 2803540814
commit 59f4db1088

11 changed files with 502 additions and 391 deletions
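The change drops the per-variant scripts/build-cpu.sh calls from the CPU Dockerfiles in favour of ggml's new GGML_CPU_ALL_VARIANTS CMake option: together with GGML_BACKEND_DL, a single build produces one dynamically loadable libggml-cpu-* library per predefined x86-64 variant, and the matching one is picked at runtime. A rough sketch of the equivalent build outside Docker, using only the flags that appear in the diff below (the variant names in the output are illustrative, not taken from this commit):

    # configure with dynamically loadable backends and every predefined CPU variant
    cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON \
          -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release
    cmake --build build -j "$(nproc)"

    # each variant ends up as its own shared library, e.g. libggml-cpu-haswell.so,
    # libggml-cpu-skylakex.so, ... (exact names depend on ggml's CMake configuration)
    find build -name "libggml-cpu*"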
@@ -9,28 +9,20 @@ WORKDIR /app
 COPY . .
 
-RUN \
-    # Build multiple versions of the CPU backend
-    scripts/build-cpu.sh avx -DGGML_AVX=ON -DGGML_AVX2=OFF && \
-    scripts/build-cpu.sh avx2 -DGGML_AVX=ON -DGGML_AVX2=ON && \
-    scripts/build-cpu.sh avx512 -DGGML_AVX=ON -DGGML_AVX2=ON -DGGML_AVX512=ON && \
-    scripts/build-cpu.sh amx -DGGML_AVX=ON -DGGML_AVX2=ON -DGGML_AVX512=ON -DGGML_AVX_VNNI=ON -DGGML_AVX512_VNNI=ON -DGGML_AMX_TILE=ON -DGGML_AMX_INT8=ON && \
-    # Build llama-server
-    cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release && \
-    cmake --build build --target llama-server -j $(nproc) && \
     # Copy the built libraries to /app/lib
+RUN cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release && \
+    cmake --build build -j $(nproc) && \
     mkdir -p /app/lib && \
-    mv libggml-cpu* /app/lib/ && \
     find build -name "*.so" -exec cp {} /app/lib/ \;
 
 FROM ubuntu:$UBUNTU_VERSION AS runtime
 
 WORKDIR /app
 
 RUN apt-get update && \
     apt-get install -y libcurl4-openssl-dev libgomp1 curl
 
-COPY --from=build /app/build/bin/llama-server /llama-server
-COPY --from=build /app/lib/ /
+COPY --from=build /app/build/bin/llama-server /app/
+COPY --from=build /app/lib/ /app/
 
 ENV LC_ALL=C.utf8
 # Must be set to 0.0.0.0 so it can listen to requests from host machine
@@ -38,4 +30,4 @@ ENV LLAMA_ARG_HOST=0.0.0.0
 
 HEALTHCHECK CMD [ "curl", "-f", "http://localhost:8080/health" ]
 
-ENTRYPOINT [ "/llama-server" ]
+ENTRYPOINT [ "/app/llama-server" ]
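Since the runtime image keeps every CPU variant under /app and llama-server is built with GGML_BACKEND_DL, one image can serve hosts from plain AVX up to AVX-512/AMX. A usage sketch (the image tag, Dockerfile path, and model path are placeholders, not taken from this commit):

    docker build -t llama-server-cpu -f <path/to/this/Dockerfile> .
    docker run --rm -p 8080:8080 -v "$PWD/models:/models" llama-server-cpu -m /models/model.gguf
    # same endpoint the HEALTHCHECK above probes
    curl -f http://localhost:8080/health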