Address comments
parent b8c86b1095
commit 56f2a80f16

1 changed file with 3 additions and 92 deletions
.github/workflows/server-convert-and-infer.yml (vendored): 95 changes
@@ -28,15 +28,6 @@ jobs:
   server:
     runs-on: ubuntu-latest
 
-    strategy:
-      matrix:
-        sanitizer: [ADDRESS, UNDEFINED] # THREAD is broken
-        build_type: [RelWithDebInfo]
-        include:
-          - build_type: Release
-            sanitizer: ""
-      fail-fast: false # While -DLLAMA_SANITIZE_THREAD=ON is broken
-
     steps:
       - name: Dependencies
         id: depends
@@ -44,13 +35,11 @@ jobs:
           sudo apt-get update
           sudo apt-get -y install \
             build-essential \
-            xxd \
             git \
             cmake \
             curl \
-            wget \
-            language-pack-en \
-            libcurl4-openssl-dev
+          curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
+          sudo apt-get install -y git-lfs
 
       - name: Clone
         id: checkout
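For readability, here is a sketch of how the Dependencies step reads once this hunk is applied. It is reassembled from the new-side lines of the diff; the step header comes from the context of the previous hunk and the run: | line sits between the two hunks, so those parts are assumed rather than visible in this commit view:

      # Dependencies step after this commit (reassembled from the diff's new side;
      # the step header and the "run: |" line are assumed from surrounding context).
      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get -y install \
            build-essential \
            git \
            cmake \
            curl \
          curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
          sudo apt-get install -y git-lfs

The net effect is that git-lfs is installed (presumably so assets used by tests/test-lora-conversion-inference.sh can be fetched), while xxd, wget, language-pack-en and libcurl4-openssl-dev are dropped from this job.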
@@ -70,41 +59,11 @@ jobs:
         run: |
           pip install -r requirements/requirements-all.txt
 
-      - name: Verify server deps
-        id: verify_server_deps
-        run: |
-          git config --global --add safe.directory $(realpath .)
-          cd examples/server
-          git ls-files --others --modified
-          git status
-          ./deps.sh
-          git status
-          not_ignored_files="$(git ls-files --others --modified)"
-          echo "Modified files: ${not_ignored_files}"
-          if [ -n "${not_ignored_files}" ]; then
-            echo "Repository is dirty or server deps are not built as expected"
-            echo "${not_ignored_files}"
-            exit 1
-          fi
-
-      - name: Build (no OpenMP)
-        id: cmake_build_no_openmp
-        if: ${{ matrix.sanitizer == 'THREAD' }}
-        run: |
-          cmake -B build \
-            -DGGML_NATIVE=OFF \
-            -DLLAMA_BUILD_SERVER=ON \
-            -DLLAMA_CURL=ON \
-            -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \
-            -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON \
-            -DGGML_OPENMP=OFF ;
-          cmake --build build --config ${{ matrix.build_type }} -j $(nproc) --target llama-server
-
       - name: Build
         id: cmake_build
         if: ${{ matrix.sanitizer != 'THREAD' }}
         run: |
-          cmake -B build \
+          make llama-cli llama-export-lora \
             -DGGML_NATIVE=OFF \
             -DLLAMA_BUILD_SERVER=ON \
             -DLLAMA_CURL=ON \
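Reassembling the new side of this hunk in the same way gives roughly the following Build step; the continuation lines after -DLLAMA_CURL=ON fall outside the hunk and are not visible in this commit view, so the sketch stops where the hunk does. Note that the if: condition still references matrix.sanitizer even though the strategy matrix is removed earlier in this commit:

      # Build step after this commit (reassembled from the diff's new side; truncated
      # where the hunk ends). The cmake configure line is replaced by a direct make
      # invocation of llama-cli and llama-export-lora.
      - name: Build
        id: cmake_build
        if: ${{ matrix.sanitizer != 'THREAD' }}
        run: |
          make llama-cli llama-export-lora \
            -DGGML_NATIVE=OFF \
            -DLLAMA_BUILD_SERVER=ON \
            -DLLAMA_CURL=ON \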
@@ -116,51 +75,3 @@ jobs:
         id: test_lora_conversion_inference
         if: ${{ (github.event.schedule || github.event.inputs.slow_tests == 'true') && matrix.build_type == 'Release' }}
         run: ./tests/test-lora-conversion-inference.sh
-
-
-  server-windows:
-    runs-on: windows-2019
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.inputs.sha || github.event.pull_request.head.sha || github.sha || github.head_ref || github.ref_name }}
-
-      - name: libCURL
-        id: get_libcurl
-        env:
-          CURL_VERSION: 8.6.0_6
-        run: |
-          curl.exe -o $env:RUNNER_TEMP/curl.zip -L "https://curl.se/windows/dl-${env:CURL_VERSION}/curl-${env:CURL_VERSION}-win64-mingw.zip"
-          mkdir $env:RUNNER_TEMP/libcurl
-          tar.exe -xvf $env:RUNNER_TEMP/curl.zip --strip-components=1 -C $env:RUNNER_TEMP/libcurl
-
-      - name: Build
-        id: cmake_build
-        run: |
-          cmake -B build -DLLAMA_CURL=ON -DCURL_LIBRARY="$env:RUNNER_TEMP/libcurl/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:RUNNER_TEMP/libcurl/include"
-          cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} --target llama-server
-
-      - name: Python setup
-        id: setup_python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.11'
-
-      - name: Tests dependencies
-        id: test_dependencies
-        run: |
-          pip install -r examples/server/tests/requirements.txt
-
-      - name: Copy Libcurl
-        id: prepare_libcurl
-        run: |
-          cp $env:RUNNER_TEMP/libcurl/bin/libcurl-x64.dll ./build/bin/Release/libcurl-x64.dll
-
-      - name: Lora convert and inference tests
-        id: test_lora_conversion_inference
-        if: ${{ (github.event.schedule || github.event.inputs.slow_tests == 'true') && matrix.build_type == 'Release' }}
-        run: ./tests/test-lora-conversion-inference.sh
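The if: condition on the test step references github.event.schedule and github.event.inputs.slow_tests (and the deleted Windows job also read github.event.inputs.sha), which suggests a trigger block along the following lines. This on: section is not part of the diff; it is a hypothetical sketch inferred from those expressions, and the cron value and input descriptions are assumptions:

# Hypothetical trigger block inferred from the if: expressions in this workflow;
# the actual on: section of server-convert-and-infer.yml is not part of this commit.
on:
  schedule:
    - cron: "0 0 * * *"      # assumed nightly run; the real schedule is not shown here
  workflow_dispatch:
    inputs:
      sha:
        description: "Commit SHA to check out"   # assumed wording
        required: false
      slow_tests:
        description: "Run slow tests such as the lora conversion and inference test"   # assumed wording
        required: false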