From 864a99e7a01d9422d2f55618dbe62c8099a2175c Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Mon, 10 Jun 2024 18:32:10 -0400 Subject: [PATCH 01/18] cmake : fix CMake requirement for CUDA (#7821) --- CMakeLists.txt | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index b1d6afbbc..8e280f87d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -402,12 +402,26 @@ if (LLAMA_CUBLAS) endif() if (LLAMA_CUDA) - cmake_minimum_required(VERSION 3.17) + cmake_minimum_required(VERSION 3.18) # for CMAKE_CUDA_ARCHITECTURES find_package(CUDAToolkit) if (CUDAToolkit_FOUND) message(STATUS "CUDA found") + if (NOT DEFINED CMAKE_CUDA_ARCHITECTURES) + # 52 == lowest CUDA 12 standard + # 60 == f16 CUDA intrinsics + # 61 == integer CUDA intrinsics + # 70 == compute capability at which unrolling a loop in mul_mat_q kernels is faster + if (LLAMA_CUDA_F16 OR LLAMA_CUDA_DMMV_F16) + set(CMAKE_CUDA_ARCHITECTURES "60;61;70") # needed for f16 CUDA intrinsics + else() + set(CMAKE_CUDA_ARCHITECTURES "52;61;70") # lowest CUDA 12 standard + lowest for integer intrinsics + #set(CMAKE_CUDA_ARCHITECTURES "OFF") # use this to compile much faster, but only F16 models work + endif() + endif() + message(STATUS "Using CUDA architectures: ${CMAKE_CUDA_ARCHITECTURES}") + enable_language(CUDA) set(GGML_HEADERS_CUDA ggml-cuda.h) @@ -472,21 +486,6 @@ if (LLAMA_CUDA) else() set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cuda_driver) # required by cuDeviceGetAttribute(), cuMemGetAllocationGranularity(...), ... endif() - - if (NOT DEFINED CMAKE_CUDA_ARCHITECTURES) - # 52 == lowest CUDA 12 standard - # 60 == f16 CUDA intrinsics - # 61 == integer CUDA intrinsics - # 70 == compute capability at which unrolling a loop in mul_mat_q kernels is faster - if (LLAMA_CUDA_F16 OR LLAMA_CUDA_DMMV_F16) - set(CMAKE_CUDA_ARCHITECTURES "60;61;70") # needed for f16 CUDA intrinsics - else() - set(CMAKE_CUDA_ARCHITECTURES "52;61;70") # lowest CUDA 12 standard + lowest for integer intrinsics - #set(CMAKE_CUDA_ARCHITECTURES "") # use this to compile much faster, but only F16 models work - endif() - endif() - message(STATUS "Using CUDA architectures: ${CMAKE_CUDA_ARCHITECTURES}") - else() message(WARNING "CUDA not found") endif() From 396b18dfec2c56846e80362db70af09b9e1d70ba Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 11 Jun 2024 01:00:30 +0100 Subject: [PATCH 02/18] `json`: document schema conversion in GBNF readme, align manual grammar examples & converters (#7841) * json: fix char pattern in grammar converters * json: prevent number precision & whitespace runaways in example grammars * json: add doc to grammar readme --- common/json-schema-to-grammar.cpp | 2 +- examples/json_schema_to_grammar.py | 2 +- .../server/public/json-schema-to-grammar.mjs | 2 +- grammars/README.md | 39 +++++++++++++++++++ grammars/json.gbnf | 6 +-- grammars/json_arr.gbnf | 6 +-- tests/test-json-schema-to-grammar.cpp | 38 +++++++++--------- 7 files changed, 67 insertions(+), 28 deletions(-) diff --git a/common/json-schema-to-grammar.cpp b/common/json-schema-to-grammar.cpp index 737bae27c..11221a32f 100644 --- a/common/json-schema-to-grammar.cpp +++ b/common/json-schema-to-grammar.cpp @@ -57,7 +57,7 @@ std::unordered_map PRIMITIVE_RULES = { {"object", {"\"{\" space ( string \":\" space value (\",\" space string \":\" space value)* )? \"}\" space", {"string", "value"}}}, {"array", {"\"[\" space ( value (\",\" space value)* )? 
\"]\" space", {"value"}}}, {"uuid", {"\"\\\"\" [0-9a-fA-F]{8} \"-\" [0-9a-fA-F]{4} \"-\" [0-9a-fA-F]{4} \"-\" [0-9a-fA-F]{4} \"-\" [0-9a-fA-F]{12} \"\\\"\" space", {}}}, - {"char", {"[^\"\\\\] | \"\\\\\" ([\"\\\\/bfnrt] | \"u\" [0-9a-fA-F]{4})", {}}}, + {"char", {"[^\"\\\\\\x7F\\x00-\\x1F] | [\\\\] ([\"\\\\bfnrt] | \"u\" [0-9a-fA-F]{4})", {}}}, {"string", {"\"\\\"\" char* \"\\\"\" space", {"char"}}}, {"null", {"\"null\" space", {}}}, }; diff --git a/examples/json_schema_to_grammar.py b/examples/json_schema_to_grammar.py index 7d889c3fe..cd444d010 100755 --- a/examples/json_schema_to_grammar.py +++ b/examples/json_schema_to_grammar.py @@ -43,7 +43,7 @@ PRIMITIVE_RULES = { 'object' : BuiltinRule('"{" space ( string ":" space value ("," space string ":" space value)* )? "}" space', ['string', 'value']), 'array' : BuiltinRule('"[" space ( value ("," space value)* )? "]" space', ['value']), 'uuid' : BuiltinRule(r'"\"" [0-9a-fA-F]{8} "-" [0-9a-fA-F]{4} "-" [0-9a-fA-F]{4} "-" [0-9a-fA-F]{4} "-" [0-9a-fA-F]{12} "\"" space', []), - 'char' : BuiltinRule(r'[^"\\] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F]{4})', []), + 'char' : BuiltinRule(r'[^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})', []), 'string' : BuiltinRule(r'"\"" char* "\"" space', ['char']), 'null' : BuiltinRule('"null" space', []), } diff --git a/examples/server/public/json-schema-to-grammar.mjs b/examples/server/public/json-schema-to-grammar.mjs index cef11eab8..dc2468396 100644 --- a/examples/server/public/json-schema-to-grammar.mjs +++ b/examples/server/public/json-schema-to-grammar.mjs @@ -41,7 +41,7 @@ const PRIMITIVE_RULES = { object : new BuiltinRule('"{" space ( string ":" space value ("," space string ":" space value)* )? "}" space', ['string', 'value']), array : new BuiltinRule('"[" space ( value ("," space value)* )? "]" space', ['value']), uuid : new BuiltinRule('"\\"" [0-9a-fA-F]{8} "-" [0-9a-fA-F]{4} "-" [0-9a-fA-F]{4} "-" [0-9a-fA-F]{4} "-" [0-9a-fA-F]{12} "\\"" space', []), - char : new BuiltinRule(`[^"\\\\] | "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F]{4})`, []), + char : new BuiltinRule(`[^"\\\\\\x7F\\x00-\\x1F] | [\\\\] (["\\\\bfnrt] | "u" [0-9a-fA-F]{4})`, []), string : new BuiltinRule(`"\\"" char* "\\"" space`, ['char']), null : new BuiltinRule('"null" space', []), }; diff --git a/grammars/README.md b/grammars/README.md index 3ffc7cec0..2ec21a4c0 100644 --- a/grammars/README.md +++ b/grammars/README.md @@ -94,6 +94,8 @@ This guide provides a brief overview. Check out the GBNF files in this directory ./main -m --grammar-file grammars/some-grammar.gbnf -p 'Some prompt' ``` +`llama.cpp` can also convert JSON schemas to grammars either ahead of time or at each request, see below. + ## Troubleshooting Grammars currently have performance gotchas (see https://github.com/ggerganov/llama.cpp/issues/4218). @@ -103,3 +105,40 @@ Grammars currently have performance gotchas (see https://github.com/ggerganov/ll A common pattern is to allow repetitions of a pattern `x` up to N times. While semantically correct, the syntax `x? x? x?.... x?` (with N repetitions) may result in extremely slow sampling. Instead, you can write `x{0,N}` (or `(x (x (x ... (x)?...)?)?)?` w/ N-deep nesting in earlier llama.cpp versions). 
+ +## Using GBNF grammars + +You can use GBNF grammars: + +- In the [server](../examples/server)'s completion endpoints, passed as the `grammar` body field +- In the [main](../examples/main) CLI, passed as the `--grammar` & `--grammar-file` flags +- With the [gbnf-validator](../examples/gbnf-validator) tool, to test them against strings. + +## JSON Schemas → GBNF + +`llama.cpp` supports converting a subset of https://json-schema.org/ to GBNF grammars: + +- In the [server](../examples/server): + - For any completion endpoints, passed as the `json_schema` body field + - For the `/chat/completions` endpoint, passed inside the `result_format` body field (e.g. `{"type", "json_object", "schema": {"items": {}}}`) +- In the [main](../examples/main) CLI, passed as the `--json` / `-j` flag +- To convert to a grammar ahead of time: + - in CLI, with [json_schema_to_grammar.py](../examples/json_schema_to_grammar.py) + - in JavaScript with [json-schema-to-grammar.mjs](../examples/server/public/json-schema-to-grammar.mjs) (this is used by the [server](../examples/server)'s Web UI) + +Take a look at [tests](../../tests/test-json-schema-to-grammar.cpp) to see which features are likely supported (you'll also find usage examples in https://github.com/ggerganov/llama.cpp/pull/5978, https://github.com/ggerganov/llama.cpp/pull/6659 & https://github.com/ggerganov/llama.cpp/pull/6555). + +Here is also a non-exhaustive list of **unsupported** features: + +- `additionalProperties`: to be fixed in https://github.com/ggerganov/llama.cpp/pull/7840 +- `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + - `integer` constraints to be implemented in https://github.com/ggerganov/llama.cpp/pull/7797 +- Remote `$ref`s in the C++ version (Python & JavaScript versions fetch https refs) +- Mixing `properties` w/ `anyOf` / `oneOf` in the same type (https://github.com/ggerganov/llama.cpp/issues/7703) +- `string` formats `uri`, `email` +- [`contains`](https://json-schema.org/draft/2020-12/json-schema-core#name-contains) / `minContains` +- `uniqueItems` +- `$anchor` (cf. [dereferencing](https://json-schema.org/draft/2020-12/json-schema-core#name-dereferencing)) +- [`not`](https://json-schema.org/draft/2020-12/json-schema-core#name-not) +- [Conditionals](https://json-schema.org/draft/2020-12/json-schema-core#name-keywords-for-applying-subsche) `if` / `then` / `else` / `dependentSchemas` +- [`patternProperties`](https://json-schema.org/draft/2020-12/json-schema-core#name-patternproperties) diff --git a/grammars/json.gbnf b/grammars/json.gbnf index a8a80752e..064a53f8a 100644 --- a/grammars/json.gbnf +++ b/grammars/json.gbnf @@ -16,10 +16,10 @@ array ::= string ::= "\"" ( [^"\\\x7F\x00-\x1F] | - "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes + "\\" (["\\bfnrt] | "u" [0-9a-fA-F]{4}) # escapes )* "\"" ws -number ::= ("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? ws +number ::= ("-"? ([0-9] | [1-9] [0-9]{0,15})) ("." [0-9]+)? ([eE] [-+]? [0-9] [1-9]{0,15})? ws # Optional space: by convention, applied in this grammar after literal chars when allowed -ws ::= ([ \t\n] ws)? +ws ::= [ \t\n]{0,20} diff --git a/grammars/json_arr.gbnf b/grammars/json_arr.gbnf index 31a3202f8..bd1312d96 100644 --- a/grammars/json_arr.gbnf +++ b/grammars/json_arr.gbnf @@ -25,10 +25,10 @@ array ::= string ::= "\"" ( [^"\\\x7F\x00-\x1F] | - "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes + "\\" (["\\bfnrt] | "u" [0-9a-fA-F]{4}) # escapes )* "\"" ws -number ::= ("-"? 
([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? ws +number ::= ("-"? ([0-9] | [1-9] [0-9]{0,15})) ("." [0-9]+)? ([eE] [-+]? [1-9] [0-9]{0,15})? ws # Optional space: by convention, applied in this grammar after literal chars when allowed -ws ::= ([ \t\n] ws)? +ws ::= [ \t\n]{0,20} diff --git a/tests/test-json-schema-to-grammar.cpp b/tests/test-json-schema-to-grammar.cpp index 052c08073..bea876bd1 100755 --- a/tests/test-json-schema-to-grammar.cpp +++ b/tests/test-json-schema-to-grammar.cpp @@ -105,7 +105,7 @@ static void test_all(const std::string & lang, std::function Date: Tue, 11 Jun 2024 02:22:57 +0100 Subject: [PATCH 03/18] json: refine constraint for whitespace to avoid runaways yet allow pretty print (#7866) --- common/json-schema-to-grammar.cpp | 2 +- examples/json_schema_to_grammar.py | 5 +- .../server/public/json-schema-to-grammar.mjs | 2 +- grammars/json.gbnf | 2 +- grammars/json_arr.gbnf | 2 +- tests/test-json-schema-to-grammar.cpp | 76 +++++++++---------- 6 files changed, 44 insertions(+), 45 deletions(-) diff --git a/common/json-schema-to-grammar.cpp b/common/json-schema-to-grammar.cpp index 11221a32f..10b9b3d1d 100644 --- a/common/json-schema-to-grammar.cpp +++ b/common/json-schema-to-grammar.cpp @@ -40,7 +40,7 @@ static std::string build_repetition(const std::string & item_rule, int min_items return result; } -const std::string SPACE_RULE = "\" \"?"; +const std::string SPACE_RULE = "| \" \" | \"\\n\" [ \\t]{0,20}"; struct BuiltinRule { std::string content; diff --git a/examples/json_schema_to_grammar.py b/examples/json_schema_to_grammar.py index cd444d010..ab19e20df 100755 --- a/examples/json_schema_to_grammar.py +++ b/examples/json_schema_to_grammar.py @@ -29,9 +29,8 @@ class BuiltinRule: self.content = content self.deps = deps or [] -# whitespace is constrained to a single space char to prevent model "running away" in -# whitespace. Also maybe improves generation quality? -SPACE_RULE = '" "?' +# Constraining spaces to prevent model "running away". +SPACE_RULE = '| " " | "\\n" [ \\t]{0,20}' PRIMITIVE_RULES = { 'boolean' : BuiltinRule('("true" | "false") space', []), diff --git a/examples/server/public/json-schema-to-grammar.mjs b/examples/server/public/json-schema-to-grammar.mjs index dc2468396..faed6a32c 100644 --- a/examples/server/public/json-schema-to-grammar.mjs +++ b/examples/server/public/json-schema-to-grammar.mjs @@ -1,5 +1,5 @@ // WARNING: This file was ported from json_schema_to_grammar.py, please fix bugs / add features there first. -const SPACE_RULE = '" "?'; +const SPACE_RULE = '| " " | "\\n" [ \\t]{0,20}'; function _buildRepetition(itemRule, minItems, maxItems, opts={}) { if (minItems === 0 && maxItems === 1) { diff --git a/grammars/json.gbnf b/grammars/json.gbnf index 064a53f8a..b6448c87b 100644 --- a/grammars/json.gbnf +++ b/grammars/json.gbnf @@ -22,4 +22,4 @@ string ::= number ::= ("-"? ([0-9] | [1-9] [0-9]{0,15})) ("." [0-9]+)? ([eE] [-+]? [0-9] [1-9]{0,15})? ws # Optional space: by convention, applied in this grammar after literal chars when allowed -ws ::= [ \t\n]{0,20} +ws ::= | " " | "\n" [ \t]{0,20} diff --git a/grammars/json_arr.gbnf b/grammars/json_arr.gbnf index bd1312d96..b3dc6f9b1 100644 --- a/grammars/json_arr.gbnf +++ b/grammars/json_arr.gbnf @@ -31,4 +31,4 @@ string ::= number ::= ("-"? ([0-9] | [1-9] [0-9]{0,15})) ("." [0-9]+)? ([eE] [-+]? [1-9] [0-9]{0,15})? 
ws # Optional space: by convention, applied in this grammar after literal chars when allowed -ws ::= [ \t\n]{0,20} +ws ::= | " " | "\n" [ \t]{0,20} diff --git a/tests/test-json-schema-to-grammar.cpp b/tests/test-json-schema-to-grammar.cpp index bea876bd1..a33104dea 100755 --- a/tests/test-json-schema-to-grammar.cpp +++ b/tests/test-json-schema-to-grammar.cpp @@ -112,7 +112,7 @@ static void test_all(const std::string & lang, std::function Date: Tue, 11 Jun 2024 07:59:20 +0200 Subject: [PATCH 04/18] fix CUDA CI by using a windows-2019 image (#7861) * try to fix CUDA ci with --allow-unsupported-compiler * trigger when build.yml changes * another test * try exllama/bdashore3 method * install vs build tools before cuda toolkit * try win-2019 --- .github/workflows/build.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 93669d531..3c04cfc29 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -13,7 +13,7 @@ on: paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m'] pull_request: types: [opened, synchronize, reopened] - paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m'] + paths: ['.github/workflows/build.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.cuh', '**/*.swift', '**/*.m'] concurrency: group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }} @@ -684,7 +684,7 @@ jobs: cmake --build build --config ${{ matrix.build }} -j $(nproc) windows-latest-cmake: - runs-on: windows-latest + runs-on: windows-2019 env: OPENBLAS_VERSION: 0.3.23 @@ -829,7 +829,7 @@ jobs: name: llama-bin-win-${{ matrix.build }}.zip windows-latest-cmake-cuda: - runs-on: windows-latest + runs-on: windows-2019 strategy: matrix: @@ -843,8 +843,9 @@ jobs: with: fetch-depth: 0 - - uses: Jimver/cuda-toolkit@v0.2.11 + - name: Install CUDA toolkit id: cuda-toolkit + uses: Jimver/cuda-toolkit@v0.2.15 with: cuda: ${{ matrix.cuda }} method: 'network' From bdcb8f42221bc40c411150a009a3d3a30fa74722 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 11 Jun 2024 08:26:07 +0200 Subject: [PATCH 05/18] CUDA: int8 tensor cores for MMQ (q4_K, q5_K, q6_K) (#7860) --- ggml-cuda/mma.cuh | 66 ++++++++++ ggml-cuda/mmq.cuh | 300 +++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 360 insertions(+), 6 deletions(-) diff --git a/ggml-cuda/mma.cuh b/ggml-cuda/mma.cuh index 71e8e3429..63e07fbc2 100644 --- a/ggml-cuda/mma.cuh +++ b/ggml-cuda/mma.cuh @@ -1,5 +1,27 @@ #include "common.cuh" +struct mma_int_A_I16K4 { + static constexpr int I = 16; + static constexpr int K = 4; + static constexpr int ne = 2; + + int x[ne] = {0}; + + static __device__ __forceinline__ int get_i(const int l) { + const int ret = (l%2) * (I/2) + threadIdx.x / K; + GGML_CUDA_ASSUME(ret >= 0); + GGML_CUDA_ASSUME(ret < I); + return ret; + } + + static __device__ __forceinline__ int get_k(const int /* l */) { + const int ret = threadIdx.x % K; + GGML_CUDA_ASSUME(ret >= 0); + GGML_CUDA_ASSUME(ret < K); + return ret; + } +}; + struct mma_int_A_I16K8 { static constexpr int I = 16; static constexpr int K = 8; @@ -22,6 +44,28 @@ struct mma_int_A_I16K8 { } }; +struct mma_int_B_J8K4 { + static constexpr int J = 8; + static constexpr int K = 4; + static constexpr int ne = 1; + + int x[ne] = {0}; + 
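+    // With ne == 1, each of the 32 threads in the warp holds a single 32-bit
+    // register (four packed int8 values) of the J x K = 8 x 4 tile of B;
+    // get_j/get_k below recover the row and 32-bit column from threadIdx.x.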
+ static __device__ __forceinline__ int get_j(const int /* l */) { + const int ret = threadIdx.x / K; + GGML_CUDA_ASSUME(ret >= 0); + GGML_CUDA_ASSUME(ret < J); + return ret; + } + + static __device__ __forceinline__ int get_k(const int /* l */) { + const int ret = threadIdx.x % K; + GGML_CUDA_ASSUME(ret >= 0); + GGML_CUDA_ASSUME(ret < K); + return ret; + } +}; + struct mma_int_B_J8K8 { static constexpr int J = 8; static constexpr int K = 8; @@ -65,6 +109,28 @@ struct mma_int_C_I16J8 { return ret; } + __device__ __forceinline__ void mma_K4(const mma_int_A_I16K4 & mma_A, const mma_int_B_J8K4 & mma_B) { +#ifdef INT8_MMA_AVAILABLE +#if __CUDA_ARCH__ >= CC_AMPERE + asm("mma.sync.aligned.m16n8k16.row.col.s32.s8.s8.s32 {%0, %1, %2, %3}, {%4, %5}, {%6}, {%0, %1, %2, %3};" + : "+r"(x[0]), "+r"(x[1]), "+r"(x[2]), "+r"(x[3]) + : "r"(mma_A.x[0]), "r"(mma_A.x[1]), "r"(mma_B.x[0])); +#else + // On Turing m16n8k16 mma is not available, use 2x m8n8k16 mma instead: + asm("mma.sync.aligned.m8n8k16.row.col.s32.s8.s8.s32 {%0, %1}, {%2}, {%3}, {%0, %1};" + : "+r"(x[0]), "+r"(x[1]) + : "r"(mma_A.x[0]), "r"(mma_B.x[0])); + asm("mma.sync.aligned.m8n8k16.row.col.s32.s8.s8.s32 {%0, %1}, {%2}, {%3}, {%0, %1};" + : "+r"(x[2]), "+r"(x[3]) + : "r"(mma_A.x[1]), "r"(mma_B.x[0])); +#endif // __CUDA_ARCH__ >= CC_AMPERE +#else + GGML_UNUSED(mma_A); + GGML_UNUSED(mma_B); + NO_DEVICE_CODE; +#endif // INT8_MMA_AVAILABLE + } + __device__ __forceinline__ void mma_K8(const mma_int_A_I16K8 & mma_A, const mma_int_B_J8K8 & mma_B) { #ifdef INT8_MMA_AVAILABLE #if __CUDA_ARCH__ >= CC_AMPERE diff --git a/ggml-cuda/mmq.cuh b/ggml-cuda/mmq.cuh index 62111f376..01e2086b4 100644 --- a/ggml-cuda/mmq.cuh +++ b/ggml-cuda/mmq.cuh @@ -1089,7 +1089,7 @@ template static __device__ __forceinlin } template -static __device__ __forceinline__ void vec_dot_q4_K_q8_1_mul_mat( +static __device__ __forceinline__ void vec_dot_q4_K_q8_1_dp4a( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y, float * __restrict__ sum, const int & k0) { @@ -1115,6 +1115,97 @@ static __device__ __forceinline__ void vec_dot_q4_K_q8_1_mul_mat( } } +template +static __device__ __forceinline__ void vec_dot_q4_K_q8_1_mma( + const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, + const int * __restrict__ y, float * __restrict__ sum, const int & k0) { + + GGML_UNUSED(x_qh); GGML_UNUSED(x_sc); + + typedef mma_int_A_I16K8 mma_A; + typedef mma_int_B_J8K8 mma_B; + typedef mma_int_C_I16J8 mma_C; + + const int * y_qs = (const int *) y + 4; + const half2 * y_ds = (const half2 *) y; + + const int i0 = threadIdx.y*mma_A::I; + static_assert(nwarps*mma_A::I == mmq_y, "nwarps*mma_A::I != mmq_y"); + + mma_A A[2]; + int scA[mma_C::ne/2][2]; + int mA[mma_C::ne/2][2]; + half2 dmA[mma_C::ne/2]; +#pragma unroll + for (int kvdr = 0; kvdr < VDR_Q4_K_Q8_1_MMQ; kvdr += 4) { +#pragma unroll + for (int l = 0; l < mma_A::ne; ++l) { + const int i = i0 + mma_A::get_i(l); + const int k = k0 + mma_A::get_k(l); + + A[kvdr/4].x[l] = (x_ql[i*(WARP_SIZE + 1) + k] >> kvdr) & 0x0F0F0F0F; + } + +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int i = i0 + mma_C::get_i(2*l); + + const uint8_t * sc = ((const uint8_t *) &x_sc[i * (WARP_SIZE/8) + i/8 + k0/16]) + 2 * ((k0 % 16) / 8); + const uint8_t * m = sc + 8; + + scA[l][kvdr/4] = sc[kvdr/4]; + mA[l][kvdr/4] = m[kvdr/4]; + } + } + +#pragma unroll + for (int l = 0; l < mma_C::ne/2; 
++l) { + const int i = i0 + mma_C::get_i(2*l); + + dmA[l] = x_dm[i*(WARP_SIZE/QI5_K) + i/QI5_K + k0/QI5_K]; + } + +#pragma unroll + for (int j0 = 0; j0 < mmq_x; j0 += mma_int_B_J8K8::J) { + float tmpd[mma_C::ne] = {0.0f}; + float tmpm[mma_C::ne] = {0.0f}; + +#pragma unroll + for (int kvdr = 0; kvdr < VDR_Q5_K_Q8_1_MMQ; kvdr += 4) { + mma_C C; + mma_B B; + half2 dsB[mma_C::ne/2]; + +#pragma unroll + for (int l = 0; l < mma_B::ne; ++l) { + const int j = j0 + mma_B::get_j(l); + const int k = (2*k0 + 2*kvdr + mma_B::get_k(l)) % WARP_SIZE; + + B.x[l] = y_qs[j*MMQ_TILE_Y_K + k]; + } +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int j = j0 + mma_C::get_j(l); + + dsB[l] = y_ds[j*MMQ_TILE_Y_K + ((2*k0 + 2*kvdr)/QI8_1) % (WARP_SIZE/QI8_1)]; + } + + C.mma_K8(A[kvdr/4], B); + +#pragma unroll + for (int l = 0; l < mma_C::ne; ++l) { + tmpd[l] += (C.x[l]*scA[l/2][kvdr/4]) * __low2float(dsB[l%2]); + tmpm[l] += mA[l/2][kvdr/4] * __high2float(dsB[l%2]); + } + } + +#pragma unroll + for (int l = 0; l < mma_C::ne; ++l) { + sum[(j0/mma_B::J)*mma_C::ne + l] += __low2float(dmA[l/2])*tmpd[l] - __high2float(dmA[l/2])*tmpm[l]; + } + } +} + template static __device__ __forceinline__ void load_tiles_q5_K( const char * __restrict__ x, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh, int * __restrict__ x_sc, const int & kbx0, const int & i_max, const int & stride) { @@ -1188,7 +1279,7 @@ template static __device__ __forceinlin } template -static __device__ __forceinline__ void vec_dot_q5_K_q8_1_mul_mat( +static __device__ __forceinline__ void vec_dot_q5_K_q8_1_dp4a( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y, float * __restrict__ sum, const int & k0) { @@ -1214,6 +1305,97 @@ static __device__ __forceinline__ void vec_dot_q5_K_q8_1_mul_mat( } } +template +static __device__ __forceinline__ void vec_dot_q5_K_q8_1_mma( + const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, + const int * __restrict__ y, float * __restrict__ sum, const int & k0) { + + GGML_UNUSED(x_qh); GGML_UNUSED(x_sc); + + typedef mma_int_A_I16K8 mma_A; + typedef mma_int_B_J8K8 mma_B; + typedef mma_int_C_I16J8 mma_C; + + const int * y_qs = (const int *) y + 4; + const half2 * y_ds = (const half2 *) y; + + const int i0 = threadIdx.y*mma_A::I; + static_assert(nwarps*mma_A::I == mmq_y, "nwarps*mma_A::I != mmq_y"); + + mma_A A[2]; + int scA[mma_C::ne/2][2]; + int mA[mma_C::ne/2][2]; + half2 dmA[mma_C::ne/2]; +#pragma unroll + for (int kvdr = 0; kvdr < VDR_Q5_K_Q8_1_MMQ; kvdr += 4) { +#pragma unroll + for (int l = 0; l < mma_A::ne; ++l) { + const int i = i0 + mma_A::get_i(l); + const int k = QR5_K*k0 + QR5_K*kvdr + mma_A::get_k(l); + + A[kvdr/4].x[l] = x_ql[i*(QR5_K*WARP_SIZE + 1) + k]; + } + +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int i = i0 + mma_C::get_i(2*l); + + const uint8_t * sc = ((const uint8_t *) &x_sc[i * (WARP_SIZE/8) + i/8 + k0/16]) + 2 * ((k0 % 16) / 8); + const uint8_t * m = sc + 8; + + scA[l][kvdr/4] = sc[kvdr/4]; + mA[l][kvdr/4] = m[kvdr/4]; + } + } + +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int i = i0 + mma_C::get_i(2*l); + + dmA[l] = x_dm[i*(WARP_SIZE/QI5_K) + i/QI5_K + k0/QI5_K]; + } + +#pragma unroll + for (int j0 = 0; j0 < mmq_x; j0 += mma_int_B_J8K8::J) { + float tmpd[mma_C::ne] = {0.0f}; + float tmpm[mma_C::ne] = {0.0f}; + +#pragma unroll + for (int kvdr = 0; 
kvdr < VDR_Q5_K_Q8_1_MMQ; kvdr += 4) { + mma_C C; + mma_B B; + half2 dsB[mma_C::ne/2]; + +#pragma unroll + for (int l = 0; l < mma_B::ne; ++l) { + const int j = j0 + mma_B::get_j(l); + const int k = (2*k0 + 2*kvdr + mma_B::get_k(l)) % WARP_SIZE; + + B.x[l] = y_qs[j*MMQ_TILE_Y_K + k]; + } +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int j = j0 + mma_C::get_j(l); + + dsB[l] = y_ds[j*MMQ_TILE_Y_K + ((2*k0 + 2*kvdr)/QI8_1) % (WARP_SIZE/QI8_1)]; + } + + C.mma_K8(A[kvdr/4], B); + +#pragma unroll + for (int l = 0; l < mma_C::ne; ++l) { + tmpd[l] += (C.x[l]*scA[l/2][kvdr/4]) * __low2float(dsB[l%2]); + tmpm[l] += mA[l/2][kvdr/4] * __high2float(dsB[l%2]); + } + } + +#pragma unroll + for (int l = 0; l < mma_C::ne; ++l) { + sum[(j0/mma_B::J)*mma_C::ne + l] += __low2float(dmA[l/2])*tmpd[l] - __high2float(dmA[l/2])*tmpm[l]; + } + } +} + template static __device__ __forceinline__ void load_tiles_q6_K( const char * __restrict__ x, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh, int * __restrict__ x_sc, const int & kbx0, const int & i_max, const int & stride) { @@ -1280,7 +1462,7 @@ template static __device__ __forceinlin } template -static __device__ __forceinline__ void vec_dot_q6_K_q8_1_mul_mat( +static __device__ __forceinline__ void vec_dot_q6_K_q8_1_dp4a( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y, float * __restrict__ sum, const int & k0) { @@ -1307,6 +1489,97 @@ static __device__ __forceinline__ void vec_dot_q6_K_q8_1_mul_mat( } } +template +static __device__ __forceinline__ void vec_dot_q6_K_q8_1_mma( + const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, + const int * __restrict__ y, float * __restrict__ sum, const int & k0) { + + GGML_UNUSED(x_qh); GGML_UNUSED(x_sc); + + typedef mma_int_A_I16K4 mma_A; + typedef mma_int_B_J8K4 mma_B; + typedef mma_int_C_I16J8 mma_C; + + const float * x_df = (const float *) x_dm; + const int * y_qs = (const int *) y + 4; + const float * y_df = (const float *) y; + + const int i0 = threadIdx.y*mma_A::I; + static_assert(nwarps*mma_A::I == mmq_y, "nwarps*mma_A::I != mmq_y"); + + mma_A A[4]; + int scA[mma_C::ne/2][4]; + float dA[mma_C::ne/2]; +#pragma unroll + for (int kvdr = 0; kvdr < VDR_Q6_K_Q8_1_MMQ; kvdr += 4) { +#pragma unroll + for (int l = 0; l < mma_A::ne; ++l) { + const int i = i0 + mma_A::get_i(l); + const int k = QR6_K*k0 + QR6_K*kvdr + mma_A::get_k(l); + + A[kvdr/2 + 0].x[l] = x_ql[i*(QR6_K*WARP_SIZE + 1) + k + 0]; + A[kvdr/2 + 1].x[l] = x_ql[i*(QR6_K*WARP_SIZE + 1) + k + mma_A::K]; + } + +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int i = i0 + mma_C::get_i(2*l); + + const int8_t * sc = ((const int8_t *) &x_sc[i * (WARP_SIZE/8) + i/8 + k0/8]); + + scA[l][kvdr/2 + 0] = sc[kvdr/2 + 0]; + scA[l][kvdr/2 + 1] = sc[kvdr/2 + 1]; + } + } + +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int i = i0 + mma_C::get_i(2*l); + + dA[l] = x_df[i*(WARP_SIZE/QI6_K) + i/QI6_K + k0/QI6_K]; + } + +#pragma unroll + for (int j0 = 0; j0 < mmq_x; j0 += mma_int_B_J8K8::J) { + float tmp[mma_C::ne] = {0.0f}; + +#pragma unroll + for (int kvdr = 0; kvdr < VDR_Q6_K_Q8_1_MMQ; kvdr += 4) { + mma_C C[2]; + mma_B B[2]; + float dB[mma_C::ne/2]; + +#pragma unroll + for (int l = 0; l < mma_B::ne; ++l) { + const int j = j0 + mma_B::get_j(l); + const int k = (2*k0 + 2*kvdr + mma_B::get_k(l)) % WARP_SIZE; + + B[0].x[l] = 
y_qs[j*MMQ_TILE_Y_K + k + 0]; + B[1].x[l] = y_qs[j*MMQ_TILE_Y_K + k + mma_B::K]; + } +#pragma unroll + for (int l = 0; l < mma_C::ne/2; ++l) { + const int j = j0 + mma_C::get_j(l); + + dB[l] = y_df[j*MMQ_TILE_Y_K + ((2*k0 + 2*kvdr)/QI8_1) % (WARP_SIZE/QI8_1)]; + } + + C[0].mma_K4(A[kvdr/2 + 0], B[0]); + C[1].mma_K4(A[kvdr/2 + 1], B[1]); + +#pragma unroll + for (int l = 0; l < mma_C::ne; ++l) { + tmp[l] += (C[0].x[l]*scA[l/2][kvdr/2 + 0] + C[1].x[l]*scA[l/2][kvdr/2 + 1])*dB[l%2]; + } + } + +#pragma unroll + for (int l = 0; l < mma_C::ne; ++l) { + sum[(j0/mma_B::J)*mma_C::ne + l] += tmp[l]*dA[l/2]; + } + } +} + template static __device__ __forceinline__ void mmq_write_back_dp4a(const float * __restrict__ sum, float * __restrict__ dst, const int & ne0, const int & ne1) { #pragma unroll @@ -1448,24 +1721,39 @@ template struct mmq_type_traits { static constexpr int vdr = VDR_Q4_K_Q8_1_MMQ; static constexpr load_tiles_mmq_t load_tiles = load_tiles_q4_K; - static constexpr vec_dot_mmq_t vec_dot = vec_dot_q4_K_q8_1_mul_mat; +#ifdef INT8_MMA_AVAILABLE + static constexpr vec_dot_mmq_t vec_dot = vec_dot_q4_K_q8_1_mma; + static constexpr mmq_write_back_t write_back = mmq_write_back_mma; +#else + static constexpr vec_dot_mmq_t vec_dot = vec_dot_q4_K_q8_1_dp4a; static constexpr mmq_write_back_t write_back = mmq_write_back_dp4a; +#endif // INT8_MMA_AVAILABLE }; template struct mmq_type_traits { static constexpr int vdr = VDR_Q5_K_Q8_1_MMQ; static constexpr load_tiles_mmq_t load_tiles = load_tiles_q5_K; - static constexpr vec_dot_mmq_t vec_dot = vec_dot_q5_K_q8_1_mul_mat; +#ifdef INT8_MMA_AVAILABLE + static constexpr vec_dot_mmq_t vec_dot = vec_dot_q5_K_q8_1_mma; + static constexpr mmq_write_back_t write_back = mmq_write_back_mma; +#else + static constexpr vec_dot_mmq_t vec_dot = vec_dot_q5_K_q8_1_dp4a; static constexpr mmq_write_back_t write_back = mmq_write_back_dp4a; +#endif // INT8_MMA_AVAILABLE }; template struct mmq_type_traits { static constexpr int vdr = VDR_Q6_K_Q8_1_MMQ; static constexpr load_tiles_mmq_t load_tiles = load_tiles_q6_K; - static constexpr vec_dot_mmq_t vec_dot = vec_dot_q6_K_q8_1_mul_mat; +#ifdef INT8_MMA_AVAILABLE + static constexpr vec_dot_mmq_t vec_dot = vec_dot_q6_K_q8_1_mma; + static constexpr mmq_write_back_t write_back = mmq_write_back_mma; +#else + static constexpr vec_dot_mmq_t vec_dot = vec_dot_q6_K_q8_1_dp4a; static constexpr mmq_write_back_t write_back = mmq_write_back_dp4a; +#endif // INT8_MMA_AVAILABLE }; static int mmq_need_sum(const ggml_type type_x) { From 4bfe50f741479c1df1c377260c3ff5702586719e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 11 Jun 2024 10:10:20 +0300 Subject: [PATCH 06/18] tests : check the Python version (#7872) ggml-ci --- tests/test-json-schema-to-grammar.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test-json-schema-to-grammar.cpp b/tests/test-json-schema-to-grammar.cpp index a33104dea..87bc66b69 100755 --- a/tests/test-json-schema-to-grammar.cpp +++ b/tests/test-json-schema-to-grammar.cpp @@ -870,7 +870,7 @@ int main() { } }); - if (getenv("LLAMA_PYTHON_AVAILABLE") || (std::system("python --version") == 0)) { + if (getenv("LLAMA_PYTHON_AVAILABLE") || (std::system("python -c \"import sys; exit(1) if sys.version_info < (3, 8) else print('Python version is sufficient')\"") == 0)) { test_all("Python", [](const TestCase & tc) { write("test-json-schema-input.tmp", tc.schema); tc.verify_status(std::system( @@ -878,7 +878,7 @@ int main() { tc.verify(read("test-grammar-output.tmp")); }); } else { - 
fprintf(stderr, "\033[33mWARNING: Python not found, skipping Python JSON schema -> grammar tests.\n\033[0m"); + fprintf(stderr, "\033[33mWARNING: Python not found (min version required is 3.8), skipping Python JSON schema -> grammar tests.\n\033[0m"); } if (getenv("LLAMA_NODE_AVAILABLE") || (std::system("node --version") == 0)) { From 148995e5e57b313cce2672f75610db58c6327a51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 11 Jun 2024 14:45:40 +0200 Subject: [PATCH 07/18] llama-bench: more compact markdown tables (#7879) --- examples/llama-bench/llama-bench.cpp | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 5c31548a6..61f5a5a09 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -1033,6 +1033,27 @@ struct markdown_printer : public printer { if (field == "n_gpu_layers") { return 3; } + if (field == "n_threads") { + return 7; + } + if (field == "n_batch") { + return 7; + } + if (field == "n_ubatch") { + return 8; + } + if (field == "type_k" || field == "type_v") { + return 6; + } + if (field == "split_mode") { + return 5; + } + if (field == "flash_attn") { + return 2; + } + if (field == "use_mmap") { + return 4; + } if (field == "test") { return 13; } From 6fe42d073f0554eada93ac9d40574025aeedb703 Mon Sep 17 00:00:00 2001 From: Brian Date: Wed, 12 Jun 2024 00:43:41 +1000 Subject: [PATCH 08/18] github: move PR template to .github/ root (#7868) --- .github/{PULL_REQUEST_TEMPLATE => }/pull_request_template.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/{PULL_REQUEST_TEMPLATE => }/pull_request_template.md (100%) diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/pull_request_template.md similarity index 100% rename from .github/PULL_REQUEST_TEMPLATE/pull_request_template.md rename to .github/pull_request_template.md From 14f83526cd27f638c856ea6eff08110b9860eb2a Mon Sep 17 00:00:00 2001 From: Deven Mistry <31466137+deven367@users.noreply.github.com> Date: Tue, 11 Jun 2024 12:18:58 -0400 Subject: [PATCH 09/18] fix broken link in pr template (#7880) [no ci] * fix broken link in pr template * Update pull_request_template.md [no ci] --------- Co-authored-by: Brian --- .github/pull_request_template.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 0852fded5..e6d032d87 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -2,4 +2,4 @@ - [ ] Review Complexity : Low - [ ] Review Complexity : Medium - [ ] Review Complexity : High -- [ ] I have read the [contributing guidelines](CONTRIBUTING.md) +- [ ] I have read the [contributing guidelines](https://github.com/ggerganov/llama.cpp/blob/master/CONTRIBUTING.md) From ef52d1d16afc695d798396cdd13594ea5e45a9dd Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Tue, 11 Jun 2024 21:20:29 +0200 Subject: [PATCH 10/18] Update Vulkan RoPE implementation (#7818) * Update Vulkan RoPE implementation * Return nullptr on alloc_buffer when allocation fails, instead of throwing an exception Minor fixes * Fix segfault when running out of VRAM Co-authored-by: slaren --------- Co-authored-by: slaren --- ggml-alloc.c | 2 +- ggml-vulkan-shaders.hpp | 2377 ++++++++++++++++++----------------- ggml-vulkan.cpp | 93 +- ggml_vk_generate_shaders.py | 66 +- 4 files changed, 1311 insertions(+), 1227 deletions(-) diff --git a/ggml-alloc.c 
b/ggml-alloc.c index 73a3c1575..eb75962d4 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -886,7 +886,7 @@ static bool alloc_tensor_range(struct ggml_context * ctx, fprintf(stderr, "%s: failed to allocate %s buffer of size %zu\n", __func__, ggml_backend_buft_name(buft), size); #endif for (size_t i = 0; i < *n_buffers; i++) { - ggml_backend_buffer_free(*buffers[i]); + ggml_backend_buffer_free((*buffers)[i]); } free(*buffers); return false; diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index b50f55860..4a8ee3415 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -137706,326 +137706,72 @@ unsigned char rms_norm_f32_data[] = { }; const uint64_t rms_norm_f32_len = 2344; -unsigned char rope_f16_data[] = { +unsigned char rope_neox_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x1c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x50,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x88,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x89,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x89,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x89,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xab,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xab,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0xba,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xbb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xbb,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x6f,0x12,0x83,0x3a,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x00,0x00,0x80,0x3f, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x1e,0x00,0x09,0x00,0x2a,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x3c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0xcd,0xcc,0xcc,0x3d, -0x17,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x88,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x8a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x8a,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xa9,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
-0x1d,0x00,0x03,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xac,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xb0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xba,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xbb,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x65,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd6,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x69,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x3c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x7f,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, 
-0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2f,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xb7,0x00,0x05,0x00, -0x3c,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x02,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xea,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x2f,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x2f,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0xee,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x88,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x10,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x15,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x1f,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0xe8,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0xe8,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x0c,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, 
-0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0xe2,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x02,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x02,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0xe2,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0xe5,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x0c,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xb0,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x7f,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0xc5,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xb0,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xb0,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd6,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t rope_f16_len = 3156; - -unsigned char rope_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, 
-0x2c,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x0f,0x00,0x0b,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, 0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x67,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x89,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x89,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x89,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8b,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xaa,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xac,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xac,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x87,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x88,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xb8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xba,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x88,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8a,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x90,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x90,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xaf,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xaf,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb1,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xc9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xc9,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xcb,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xcb,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x16,0x00,0x03,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -138035,25 +137781,26 @@ unsigned char rope_f32_data[] = { 0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x6f,0x12,0x83,0x3a, 0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, 0x00,0x00,0x80,0x3f,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x1e,0x00,0x09,0x00,0x2a,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x1e,0x00,0x0c,0x00,0x29,0x00,0x00,0x00,0x07,0x00,0x00,0x00, 0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x20,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x3c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x3b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00, 0xcd,0xcc,0xcc,0x3d,0x17,0x00,0x04,0x00,0x65,0x00,0x00,0x00, 0x07,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, @@ -138064,593 +137811,627 @@ unsigned char rope_f32_data[] = { 0x69,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x88,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x8a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x8a,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x8d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xab,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xab,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xb7,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xb8,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x65,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, 
-0xd1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x6e,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x3c,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x75,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x76,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2f,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x88,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xb7,0x00,0x05,0x00,0x3c,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, 
-0xfd,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe4,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x2f,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x2f,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0xe7,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0xe3,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0xe3,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0xdd,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0xe0,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, 
-0x03,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xaf,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xc2,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xaf,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xaf,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t rope_f32_len = 3072; - -unsigned char rope_neox_f16_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x69,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0b,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x68,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x95,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x96,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x96,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x96,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x98,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x98,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xc5,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc5,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xd2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xd3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xd3,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xd3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x20,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x6f,0x12,0x83,0x3a,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x00,0x00,0x80,0x3f, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x1e,0x00,0x0d,0x00,0x2a,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x3c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xcd,0xcc,0xcc,0x3d,0x17,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x94,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x97,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xc2,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xd2,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xd3,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x66,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x69,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x00,0x00,0x00,0x3f,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x22,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x3c,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0xac,0x00,0x05,0x00,0x3c,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa1,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa1,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x07,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x73,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xc7,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x73,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xab,0x00,0x05,0x00, -0x3c,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xce,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xd8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2f,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x88,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x01,0x00,0x00,0xe1,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x2c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0xb7,0x00,0x05,0x00,0x3c,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x4d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
-0x34,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x2f,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x2c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x2f,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x2c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x38,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0xb8,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x37,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x09,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x86,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x89,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x89,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x8d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x93,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xae,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xb0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xc9,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xca,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xce,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x65,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x00,0x00,0x00,0x3f, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x08,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x69,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x08,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x86,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x86,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x08,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
+0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xab,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xc7,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xce,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd1,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x88,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0xb7,0x00,0x05,0x00,0x3b,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x1a,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x34,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x1b,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x2e,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x2b,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x2e,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0xa5,0x00,0x00,0x00,0x83,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, 0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x44,0x01,0x00,0x00, 
+0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00, 0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x33,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, 0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0xed,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x88,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0x4a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x4a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x4d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x4d,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xd1,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0xd1,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x0c,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x65,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x65,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x17,0x01,0x00,0x00, 
+0x27,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x88,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x2e,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xc7,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x0c,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x36,0x01,0x00,0x00, +0x4c,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, 0x17,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x9e,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x7f,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x94,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xa1,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x98,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0x51,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x98,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1e,0x01,0x00,0x00, -0x1d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, +0xe9,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0xea,0x00,0x00,0x00, 
+0x90,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x7f,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0xf4,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x86,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0xec,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x8a,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x08,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x08,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t rope_neox_f16_len = 4132; +const uint64_t rope_neox_f16_len = 3952; unsigned char rope_neox_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x63,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x4a,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0b,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x07,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2a,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x68,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x95,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x86,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x87,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x89,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x89,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x8d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x8d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x8d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8f,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xae,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xae,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xae,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb0,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb0,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xc8,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xca,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xca,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x06,0x00,0x00,0x00, 
+0x20,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x6f,0x12,0x83,0x3a,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x00,0x00,0x80,0x3f,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x1e,0x00,0x0c,0x00,0x29,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x3b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0xcd,0xcc,0xcc,0x3d,0x17,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x66,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x86,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x88,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x8c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x8e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x92,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xad,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xb2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, 
+0xc0,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xc7,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x65,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x00,0x00,0x00,0x3f, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x02,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x69,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x02,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0xab,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xc6,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc5,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0xce,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x88,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0xd3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0xb7,0x00,0x05,0x00,0x3b,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x2e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
+0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x16,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x2e,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x2b,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x2e,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x83,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0xd3,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x11,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x88,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x29,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2e,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0xc6,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x0c,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 
+0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xf0,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x02,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x02,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t rope_neox_f32_len = 3852; + +unsigned char rope_norm_f16_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x49,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0b,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x67,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x87,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x88,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x95,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x97,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x88,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8a,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x97,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x9b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x9b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x9b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x90,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xc1,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xae,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xc2,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xae,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb0,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc4,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xd2,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xd2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xd2,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xca,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x1a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, 
+0xca,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x16,0x00,0x03,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -138660,270 +138441,590 @@ unsigned char rope_neox_f32_data[] = { 0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x6f,0x12,0x83,0x3a, 0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, 0x00,0x00,0x80,0x3f,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x1e,0x00,0x0c,0x00,0x29,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x3b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0xcd,0xcc,0xcc,0x3d,0x17,0x00,0x04,0x00,0x65,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x66,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x69,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x86,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x89,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x89,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x8d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x93,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xad,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xaf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xb2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xc7,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x65,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x00,0x00,0x00,0x3f, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x01,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x69,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x01,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x86,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x90,0x00,0x00,0x00, 
+0x41,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x86,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x01,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xb2,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0xab,0x00,0x05,0x00,0x3b,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xc6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc5,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xcd,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x88,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x0e,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0x12,0x01,0x00,0x00, 
+0x2b,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0xb7,0x00,0x05,0x00,0x3b,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x15,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x2e,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x2b,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x2e,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0xd4,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x27,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0xc6,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0xc6,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x31,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x86,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x7f,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x31,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x86,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x93,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x93,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfe,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x01,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x01,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t rope_norm_f16_len = 3952; + +unsigned char rope_norm_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0b,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
+0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x86,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x87,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x89,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x89,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x8d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x8d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x8d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8f,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xac,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xad,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xad,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xad,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xaf,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc6,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xc7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xc7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x07,0x00,0x00,0x00, 0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x1e,0x00,0x0d,0x00,0x2a,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x6f,0x12,0x83,0x3a,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x00,0x00,0x80,0x3f,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x1e,0x00,0x0c,0x00,0x29,0x00,0x00,0x00, 0x07,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x3c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xcd,0xcc,0xcc,0x3d, -0x17,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x94,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x95,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x96,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x96,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x9a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xc1,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xc2,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xd1,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xd3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x07,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x66,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x69,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x00,0x00,0x00,0x3f,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x1b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1c,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x3c,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0xac,0x00,0x05,0x00,0x3c,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xa0,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa0,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xa0,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x73,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xc6,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x73,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xab,0x00,0x05,0x00, -0x3c,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd0,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcd,0x00,0x00,0x00, -0xcf,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa0,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd0,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0xd8,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2f,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x88,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x2c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0xdf,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x2f,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x2c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xb7,0x00,0x05,0x00,0x3c,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x47,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x2f,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x2f,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x2c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0xb7,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x3b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0xcd,0xcc,0xcc,0x3d,0x17,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x66,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, 
+0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x86,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x88,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x8c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x8e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x92,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xac,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xae,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xb1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xc6,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x65,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x00,0x00,0x00,0x3f, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xfb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x69,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x3b,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xae,0x00,0x05,0x00, 
+0x3b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x07,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xb1,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x07,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xab,0x00,0x05,0x00,0x3b,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xc5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc2,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc4,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x41,0x00,0x00,0x00, 
+0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc5,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x88,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0xd2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0xb7,0x00,0x05,0x00,0x3b,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x27,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x0e,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0f,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x2e,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x2b,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x2e,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x2b,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, 0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x88,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x47,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x47,0x01,0x00,0x00, 
-0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x0c,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xa0,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x07,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa0,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x9d,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x01,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xa0,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x97,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xa0,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x97,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1b,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x28,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, +0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x1f,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0xd2,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x01,0x00,0x00, 
+0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x1f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x27,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x27,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0xc5,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0xc5,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x29,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x0c,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x07,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0xec,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x92,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfb,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t rope_neox_f32_len = 4032; +const uint64_t rope_norm_f32_len = 3852; unsigned char scale_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 128769177..05cfa3159 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -150,7 +150,7 @@ struct vk_device { vk_pipeline pipeline_relu_f32; vk_pipeline pipeline_diag_mask_inf_f32; vk_pipeline pipeline_soft_max_f32, pipeline_soft_max_f32_f16; - vk_pipeline pipeline_rope_f32, pipeline_rope_f16; + vk_pipeline pipeline_rope_norm_f32, pipeline_rope_norm_f16; vk_pipeline pipeline_rope_neox_f32, pipeline_rope_neox_f16; vk_pipeline pipeline_argsort_f32; vk_pipeline 
pipeline_sum_rows_f32; @@ -283,26 +283,15 @@ struct vk_op_diag_mask_push_constants { struct vk_op_rope_push_constants { uint32_t ncols; + uint32_t n_dims; float freq_scale; uint32_t p_delta_rows; float freq_base; float ext_factor; float attn_factor; - float corr_dims[4]; -}; - -struct vk_op_rope_neox_push_constants { - uint32_t ncols; - uint32_t ndims; - float freq_scale; - uint32_t p_delta_rows; - float freq_base; - float ext_factor; - float attn_factor; - float corr_dims[4]; + float corr_dims[2]; float theta_scale; - float inv_ndims; - uint32_t has_freq_facs; + uint32_t has_ff; }; struct vk_op_soft_max_push_constants { @@ -1534,11 +1523,11 @@ static void ggml_vk_load_shaders(ggml_backend_vk_context * ctx) { ggml_vk_create_pipeline(ctx, ctx->device->pipeline_soft_max_f32, "soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 3, sizeof(vk_op_soft_max_push_constants), {1, 1, 1}, {}, 1); ggml_vk_create_pipeline(ctx, ctx->device->pipeline_soft_max_f32_f16, "soft_max_f32_f16", soft_max_f32_f16_len, soft_max_f32_f16_data, "main", 3, sizeof(vk_op_soft_max_push_constants), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_f32, "rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_f16, "rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_norm_f32, "rope_norm_f32", rope_norm_f32_len, rope_norm_f32_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_norm_f16, "rope_norm_f16", rope_norm_f16_len, rope_norm_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 4, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 4, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); ggml_vk_create_pipeline(ctx, ctx->device->pipeline_argsort_f32, "argsort_f32", argsort_f32_len, argsort_f32_data, "main", 2, sizeof(vk_op_argsort_push_constants), {1024, 1, 1}, {}, 1); @@ -3905,10 +3894,10 @@ static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const } } else { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return ctx->device->pipeline_rope_f32; + return ctx->device->pipeline_rope_norm_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return ctx->device->pipeline_rope_f16; + return ctx->device->pipeline_rope_norm_f16; } } return nullptr; @@ -4152,24 +4141,16 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, subbuf_y, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (op == GGML_OP_ROPE) { - 
const int mode = ((int32_t *) dst->op_params)[2]; - const bool is_neox = mode & 2; - - if (is_neox) { - // Empty src2 is possible in rope, but the shader needs a buffer - vk_subbuffer subbuf_z; - if (use_src2) { - subbuf_z = { d_Z, z_buf_offset, z_sz }; - } else { - subbuf_z = { d_X, 0, d_X->size }; - } - - ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, subbuf_z, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + // Empty src2 is possible in rope, but the shader needs a buffer + vk_subbuffer subbuf_z; + if (use_src2) { + subbuf_z = { d_Z, z_buf_offset, z_sz }; } else { - ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + subbuf_z = { d_X, 0, d_X->size }; } + + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, subbuf_z, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (use_src2) { ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, { d_Z, z_buf_offset, z_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); @@ -4391,7 +4372,7 @@ static void ggml_vk_soft_max(ggml_backend_vk_context * ctx, vk_context * subctx, static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst) { const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; + // const int mode = ((int32_t *) dst->op_params)[2]; // const int n_ctx = ((int32_t *) dst->op_params)[3]; const int n_ctx_orig = ((int32_t *) dst->op_params)[4]; const float freq_base = ((float *) dst->op_params)[5]; @@ -4401,28 +4382,16 @@ static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, con const float beta_fast = ((float *) dst->op_params)[9]; const float beta_slow = ((float *) dst->op_params)[10]; - const bool is_neox = mode & 2; - -#pragma message("TODO: update rope NORM mode to match NEOX mode") -#pragma message(" https://github.com/ggerganov/llama.cpp/pull/7634") - float corr_dims[2]; ggml_rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims); - if (is_neox) { - const float theta_scale = powf(freq_base, -2.0f/n_dims); - const float inv_ndims = -1.0f / n_dims; - ggml_vk_op_f32(ctx, subctx, src0, src1, src2, dst, GGML_OP_ROPE, { - (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], - freq_base, ext_factor, attn_factor, {corr_dims[0], corr_dims[1], 0.0f, 0.0f}, theta_scale, inv_ndims, - src2 != nullptr, - }); - } else { - ggml_vk_op_f32(ctx, subctx, src0, src1, src2, dst, GGML_OP_ROPE, { - (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], - freq_base, ext_factor, attn_factor, {corr_dims[0], corr_dims[1], 0.0f, 0.0f} - }); - } + const float theta_scale = powf(freq_base, -2.0f/n_dims); + + ggml_vk_op_f32(ctx, subctx, src0, src1, src2, dst, GGML_OP_ROPE, { + (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], + freq_base, ext_factor, attn_factor, {corr_dims[0], corr_dims[1]}, theta_scale, + src2 != nullptr, + }); } static void ggml_vk_argsort(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { 
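The unified dispatch above moves `theta_scale = freq_base^(-2/n_dims)` to the host and leaves only the per-column exponentiation and the optional frequency-factor divide to the shader, which is why the norm and neox paths can now share one `vk_op_rope_push_constants` layout. As a rough aid for checking the two shader variants, here is a minimal, self-contained CPU sketch of that angle computation; the helper names are hypothetical, and the YaRN details are assumed to match the `rope_yarn` helpers in the shader sources further down:

```cpp
// CPU sketch of the per-column RoPE angle the unified shaders compute.
// Hypothetical names; YaRN ramp assumed to match rope_yarn_ramp below.
#include <cmath>
#include <cstdio>

static float rope_yarn_ramp(float low, float high, int i0) {
    const float y = (i0 / 2 - low) / std::fmax(0.001f, high - low);
    return 1.0f - std::fmin(1.0f, std::fmax(0.0f, y));
}

// pos: token position; i0: even column index; corr_dims: the two floats the
// unified push constants carry; freq_factor: 1.0f unless src2 supplies one.
static void rope_angle(int pos, int i0, int n_dims,
                       float freq_base, float freq_scale,
                       float ext_factor, float attn_factor,
                       const float corr_dims[2], float freq_factor,
                       float * cos_theta, float * sin_theta) {
    // host side of the split: theta_scale = freq_base^(-2/n_dims)
    const float theta_scale  = std::pow(freq_base, -2.0f / n_dims);
    // shader side: theta_base = pos * theta_scale^(i0/2), then the divide
    const float theta_extrap = pos * std::pow(theta_scale, i0 / 2.0f) / freq_factor;
    const float theta_interp = freq_scale * theta_extrap;

    float theta  = theta_interp;
    float mscale = attn_factor;
    if (ext_factor != 0.0f) {
        // YaRN: blend interpolated/extrapolated angles between corr_dims
        const float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor;
        theta   = theta_interp * (1.0f - ramp_mix) + theta_extrap * ramp_mix;
        mscale *= 1.0f + 0.1f * std::log(1.0f / freq_scale);
    }
    *cos_theta = std::cos(theta) * mscale;
    *sin_theta = std::sin(theta) * mscale;
}

int main() {
    const float corr_dims[2] = { 8.0f, 16.0f }; // placeholder values
    float c, s;
    for (int i0 = 0; i0 < 8; i0 += 2) {
        rope_angle(/*pos=*/3, i0, /*n_dims=*/128, /*freq_base=*/10000.0f,
                   /*freq_scale=*/1.0f, /*ext_factor=*/0.0f, /*attn_factor=*/1.0f,
                   corr_dims, /*freq_factor=*/1.0f, &c, &s);
        std::printf("i0=%d cos=%.6f sin=%.6f\n", i0, c, s);
    }
    return 0;
}
```

With `ext_factor == 0` the ramp never fires and `theta` reduces to plain `freq_scale * pos * theta_scale^(i0/2)`. Note that the old code only ever consumed the first two entries of `corr_dims[4]` (the norm path passed `{corr_dims[0], corr_dims[1], 0.0f, 0.0f}`), which is what lets the unified push constants shrink it to `corr_dims[2]`.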
@@ -6070,7 +6039,13 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_buffer_type_alloc_buffer( std::cerr << "ggml_backend_vk_buffer_type_alloc_buffer(" << size << ")" << std::endl; #endif ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; - vk_buffer dev_buffer = ggml_vk_create_buffer_device(ctx->ctx, size); + + vk_buffer dev_buffer = nullptr; + try { + dev_buffer = ggml_vk_create_buffer_device(ctx->ctx, size); + } catch (const vk::SystemError& e) { + return nullptr; + } ggml_backend_vk_buffer_context * bufctx = new ggml_backend_vk_buffer_context(ctx->ctx, std::move(dev_buffer), ctx->name); @@ -6466,7 +6441,7 @@ GGML_CALL static bool ggml_backend_vk_supports_op(ggml_backend_t backend, const // return src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16; // } break; case GGML_OP_ROPE: - return true; + return ggml_is_contiguous(op->src[0]); case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index a905f570c..400a63f57 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -2400,7 +2400,7 @@ void main() { """ # ROPE -rope_src = """ +rope_norm_src = """ #version 450 #extension GL_EXT_shader_16bit_storage : require @@ -2408,17 +2408,21 @@ rope_src = """ layout(local_size_x = 1, local_size_y = 256, local_size_z = 1) in; layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; -layout (binding = 1) readonly buffer Y {int data_b[];}; -layout (binding = 2) writeonly buffer D {D_TYPE data_d[];}; +layout (binding = 1) readonly buffer Y {int data_pos[];}; +layout (binding = 2) readonly buffer Z {float data_ff[];}; +layout (binding = 3) writeonly buffer D {D_TYPE data_d[];}; layout (push_constant) uniform parameter { uint ncols; + uint n_dims; float freq_scale; uint p_delta_rows; float freq_base; float ext_factor; float attn_factor; - float corr_dims[4]; + float corr_dims[2]; + float theta_scale; + uint has_ff; } p; float rope_yarn_ramp(const float low, const float high, const uint i0) { @@ -2450,14 +2454,24 @@ void main() { return; } + if (col >= p.n_dims) { + const uint i = row*p.ncols + col; + + data_d[i + 0] = data_a[i + 0]; + data_d[i + 1] = data_a[i + 1]; + + return; + } + const uint i = row*p.ncols + col; const uint i2 = row/p.p_delta_rows; - const int pos = data_b[i2]; - const float theta_base = pos * pow(p.freq_base, -float(col)/p.ncols); + const float theta_base = data_pos[i2] * pow(p.theta_scale, col/2.0f); + + const float freq_factor = p.has_ff != 0 ? 
data_ff[col/2] : 1.0f; float cos_theta, sin_theta; - rope_yarn(theta_base, col, cos_theta, sin_theta); + rope_yarn(theta_base / freq_factor, col, cos_theta, sin_theta); const float x0 = float(data_a[i + 0]); const float x1 = float(data_a[i + 1]); @@ -2475,22 +2489,21 @@ rope_neox_src = """ layout(local_size_x = 1, local_size_y = 256, local_size_z = 1) in; layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; -layout (binding = 1) readonly buffer Y {int data_b[];}; -layout (binding = 2) readonly buffer Z {float data_freq_factors[];}; +layout (binding = 1) readonly buffer Y {int data_pos[];}; +layout (binding = 2) readonly buffer Z {float data_ff[];}; layout (binding = 3) writeonly buffer D {D_TYPE data_d[];}; layout (push_constant) uniform parameter { uint ncols; - uint ndims; + uint n_dims; float freq_scale; uint p_delta_rows; float freq_base; float ext_factor; float attn_factor; - float corr_dims[4]; + float corr_dims[2]; float theta_scale; - float inv_ndims; - uint has_freq_facs; + uint has_ff; } p; float rope_yarn_ramp(const float low, const float high, const uint i0) { @@ -2522,11 +2535,8 @@ void main() { return; } - const uint ib = col / p.ndims; - const uint ic = col % p.ndims; - - if (ib > 0) { - const uint i = row*p.ncols + ib*p.ndims + ic; + if (col >= p.n_dims) { + const uint i = row*p.ncols + col; data_d[i + 0] = data_a[i + 0]; data_d[i + 1] = data_a[i + 1]; @@ -2534,29 +2544,27 @@ void main() { return; } - const uint i = row*p.ncols + ib*p.ndims + ic/2; + const uint i = row*p.ncols + col/2; const uint i2 = row/p.p_delta_rows; - const int pos = data_b[i2]; - const float freq_factor = p.has_freq_facs != 0 ? data_freq_factors[ic/2] : 1.0f; - const float theta_base = pos*p.freq_scale*pow(p.theta_scale, col/2.0f) / freq_factor; + const float theta_base = data_pos[i2] * pow(p.theta_scale, col/2.0f); + + const float freq_factor = p.has_ff != 0 ? 
data_ff[col/2] : 1.0f; float cos_theta, sin_theta; - rope_yarn(theta_base, ic, cos_theta, sin_theta); + rope_yarn(theta_base / freq_factor, col, cos_theta, sin_theta); const float x0 = float(data_a[i + 0]); - const float x1 = float(data_a[i + p.ndims/2]); + const float x1 = float(data_a[i + p.n_dims/2]); data_d[i + 0] = D_TYPE(x0*cos_theta - x1*sin_theta); - data_d[i + p.ndims/2] = D_TYPE(x0*sin_theta + x1*cos_theta); + data_d[i + p.n_dims/2] = D_TYPE(x0*sin_theta + x1*cos_theta); } """ argsort_src = """ #version 450 -#extension GL_EXT_shader_16bit_storage : require - #define BLOCK_SIZE 1024 #define ASC 0 @@ -3039,8 +3047,8 @@ async def main(): tasks.append(string_to_spv("soft_max_f32", f"{soft_max_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "C_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("soft_max_f32_f16", f"{soft_max_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float16_t", "C_TYPE": "float16_t", "D_TYPE": "float"})) - tasks.append(string_to_spv("rope_f32", rope_src, {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("rope_f16", rope_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv("rope_norm_f32", rope_norm_src, {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rope_norm_f16", rope_norm_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) From 73bac2b11d7d3e20982fc9ee607625836387db8b Mon Sep 17 00:00:00 2001 From: "k.h.lai" Date: Wed, 12 Jun 2024 03:26:05 +0800 Subject: [PATCH 11/18] vulkan: select only one device for single gpu with multiple drivers (#7582) --- ggml-vulkan.cpp | 82 ++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 78 insertions(+), 4 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 05cfa3159..06ba23313 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1,5 +1,5 @@ #include "ggml-vulkan.h" - +#include #ifdef GGML_VULKAN_RUN_TESTS #include #endif @@ -9,12 +9,13 @@ #include #include #include -#include #include #include #include #include #include +#include +#include #include "ggml.h" #include "ggml-backend-impl.h" @@ -1555,8 +1556,10 @@ static void ggml_vk_print_gpu_info(size_t idx) { vk::PhysicalDeviceProperties2 props2; vk::PhysicalDeviceMaintenance3Properties props3; vk::PhysicalDeviceSubgroupProperties subgroup_props; + vk::PhysicalDeviceDriverProperties driver_props; props2.pNext = &props3; props3.pNext = &subgroup_props; + subgroup_props.pNext = &driver_props; physical_device.getProperties2(&props2); const size_t subgroup_size = subgroup_props.subgroupSize; @@ -1600,7 +1603,7 @@ static void ggml_vk_print_gpu_info(size_t idx) { fp16 = fp16 && vk12_features.shaderFloat16; std::string device_name = props2.properties.deviceName.data(); - std::cerr << GGML_VK_NAME << idx << ": " << device_name << " | uma: " << uma << " | fp16: " << fp16 << " | warp size: " << subgroup_size << std::endl; + std::cerr << GGML_VK_NAME << idx << ": " << device_name << " (" << driver_props.driverName << ") | uma: " << uma << " | fp16: " << fp16 << " | warp size: " << subgroup_size << std::endl; if (props2.properties.deviceType == vk::PhysicalDeviceType::eCpu) { std::cerr << "ggml_vulkan: Warning: Device type is CPU. This is probably not the device you want." 
<< std::endl; @@ -1696,7 +1699,78 @@ void ggml_vk_instance_init() { vk::PhysicalDeviceProperties props = devices[i].getProperties(); if (props.deviceType == vk::PhysicalDeviceType::eDiscreteGpu) { - vk_instance.device_indices.push_back(i); + // Check if there are two physical devices corresponding to the same GPU + auto old_device = std::find_if( + vk_instance.device_indices.begin(), + vk_instance.device_indices.end(), + [&devices, &props](const size_t k){ return devices[k].getProperties().deviceID == props.deviceID; } + ); + if (old_device == vk_instance.device_indices.end()) { + vk_instance.device_indices.push_back(i); + } else { + // There can be two physical devices corresponding to the same GPU if there are 2 different drivers + // This can cause error when splitting layers aross the devices, need to keep only 1 +#ifdef GGML_VULKAN_DEBUG + std::cerr << "Device " << i << " and device " << *old_device << " have the same device id" << std::endl; +#endif + + vk::PhysicalDeviceProperties2 old_prop; + vk::PhysicalDeviceDriverProperties old_driver; + old_prop.pNext = &old_driver; + devices[*old_device].getProperties2(&old_prop); + + vk::PhysicalDeviceProperties2 new_prop; + vk::PhysicalDeviceDriverProperties new_driver; + new_prop.pNext = &new_driver; + devices[i].getProperties2(&new_prop); + + std::map driver_priorities {}; + int old_priority = std::numeric_limits::max(); + int new_priority = std::numeric_limits::max(); + + // Check https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkDriverId.html for the list of driver id + // Smaller number -> higher priority + switch (old_prop.properties.vendorID) { + case VK_VENDOR_ID_AMD: + driver_priorities[vk::DriverId::eMesaRadv] = 1; + driver_priorities[vk::DriverId::eAmdOpenSource] = 2; + driver_priorities[vk::DriverId::eAmdProprietary] = 3; + break; + case VK_VENDOR_ID_INTEL: + driver_priorities[vk::DriverId::eIntelOpenSourceMESA] = 1; + driver_priorities[vk::DriverId::eIntelProprietaryWindows] = 2; + break; + case VK_VENDOR_ID_NVIDIA: + driver_priorities[vk::DriverId::eNvidiaProprietary] = 1; +#if defined(VK_API_VERSION_1_3) && VK_HEADER_VERSION >= 235 + driver_priorities[vk::DriverId::eMesaNvk] = 2; +#endif + break; + } + + if (driver_priorities.count(old_driver.driverID)) { + old_priority = driver_priorities[old_driver.driverID]; + } + if (driver_priorities.count(new_driver.driverID)) { + new_priority = driver_priorities[new_driver.driverID]; + } + + if (new_priority < old_priority) { + auto r = std::remove(vk_instance.device_indices.begin(), vk_instance.device_indices.end(), *old_device); + vk_instance.device_indices.erase(r, vk_instance.device_indices.end()); + vk_instance.device_indices.push_back(i); + +#ifdef GGML_VULKAN_DEBUG + std::cerr << "Prioritize device " << i << " driver " << new_driver.driverName << " over device " << *old_device << " driver " << old_driver.driverName << std::endl; +#endif + } +#ifdef GGML_VULKAN_DEBUG + else { + std::cerr << "Prioritize device " << *old_device << " driver " << old_driver.driverName << " over device " << i << " driver " << new_driver.driverName << std::endl; + + } +#endif + } } } From f2b5764beb35583295e2475479c18f249b139b58 Mon Sep 17 00:00:00 2001 From: Patrice Ferlet Date: Wed, 12 Jun 2024 03:18:16 +0200 Subject: [PATCH 12/18] Fix a typo and add Fedora 40 pacakge to install for Vulkan (#7794) [no ci] Fix "appropiate" to "appropriate" and add Fedora 40 packages to install to compile with Vulkan support --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff 
--git a/README.md b/README.md index ecb9d00db..8c065aace 100644 --- a/README.md +++ b/README.md @@ -576,7 +576,9 @@ Building the program with BLAS support may lead to some performance improvements vulkaninfo ``` - Alternatively your package manager might be able to provide the appropiate libraries. For example for Ubuntu 22.04 you can install `libvulkan-dev` instead. + Alternatively your package manager might be able to provide the appropriate libraries. + For example for Ubuntu 22.04 you can install `libvulkan-dev` instead. + For Fedora 40, you can install `vulkan-devel`, `glslc` and `glslang` packages. Then, build llama.cpp using the cmake command below: From dcf752707d96eb305f546526c7bc5d01f0831130 Mon Sep 17 00:00:00 2001 From: "Meng, Hengyu" Date: Wed, 12 Jun 2024 17:05:35 +0800 Subject: [PATCH 13/18] update intel docker oneapi-basekit to 2024.1.1-devel-ubuntu22.04 (#7894) In addition this reverts a workaround we had to do to workaround the upstream issue with expired intel GPG package keys in 2024.0.1-devel-ubuntu22.04 --- .devops/main-intel.Dockerfile | 10 +--------- .devops/server-intel.Dockerfile | 18 +----------------- 2 files changed, 2 insertions(+), 26 deletions(-) diff --git a/.devops/main-intel.Dockerfile b/.devops/main-intel.Dockerfile index 7516c8313..b7992f47b 100644 --- a/.devops/main-intel.Dockerfile +++ b/.devops/main-intel.Dockerfile @@ -1,15 +1,7 @@ -ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04 +ARG ONEAPI_VERSION=2024.1.1-devel-ubuntu22.04 FROM intel/oneapi-basekit:$ONEAPI_VERSION as build -RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/intel-oneapi-archive-keyring.gpg > /dev/null && \ - echo "deb [signed-by=/usr/share/keyrings/intel-oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main " | tee /etc/apt/sources.list.d/oneAPI.list && \ - chmod 644 /usr/share/keyrings/intel-oneapi-archive-keyring.gpg && \ - rm /etc/apt/sources.list.d/intel-graphics.list && \ - wget -O- https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor | tee /usr/share/keyrings/intel-graphics.gpg > /dev/null && \ - echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc" | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \ - chmod 644 /usr/share/keyrings/intel-graphics.gpg - ARG LLAMA_SYCL_F16=OFF RUN apt-get update && \ apt-get install -y git diff --git a/.devops/server-intel.Dockerfile b/.devops/server-intel.Dockerfile index 13d00b737..c5adcb6da 100644 --- a/.devops/server-intel.Dockerfile +++ b/.devops/server-intel.Dockerfile @@ -1,15 +1,7 @@ -ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04 +ARG ONEAPI_VERSION=2024.1.1-devel-ubuntu22.04 FROM intel/oneapi-basekit:$ONEAPI_VERSION as build -RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/intel-oneapi-archive-keyring.gpg > /dev/null && \ - echo "deb [signed-by=/usr/share/keyrings/intel-oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main " | tee /etc/apt/sources.list.d/oneAPI.list && \ - chmod 644 /usr/share/keyrings/intel-oneapi-archive-keyring.gpg && \ - rm /etc/apt/sources.list.d/intel-graphics.list && \ - wget -O- https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor | tee /usr/share/keyrings/intel-graphics.gpg > /dev/null && \ - echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] 
https://repositories.intel.com/graphics/ubuntu jammy arc" | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \ - chmod 644 /usr/share/keyrings/intel-graphics.gpg - ARG LLAMA_SYCL_F16=OFF RUN apt-get update && \ apt-get install -y git libcurl4-openssl-dev @@ -27,14 +19,6 @@ RUN if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime -RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/intel-oneapi-archive-keyring.gpg > /dev/null && \ - echo "deb [signed-by=/usr/share/keyrings/intel-oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main " | tee /etc/apt/sources.list.d/oneAPI.list && \ - chmod 644 /usr/share/keyrings/intel-oneapi-archive-keyring.gpg && \ - rm /etc/apt/sources.list.d/intel-graphics.list && \ - wget -O- https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor | tee /usr/share/keyrings/intel-graphics.gpg > /dev/null && \ - echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc" | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \ - chmod 644 /usr/share/keyrings/intel-graphics.gpg - RUN apt-get update && \ apt-get install -y libcurl4-openssl-dev From 704a35b183748954013bd875bbbfdd9eaca14e62 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 12 Jun 2024 14:42:29 +0300 Subject: [PATCH 14/18] server : restore numeric prompts (#7883) --- examples/server/server.cpp | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 80714fa58..919078f2b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -147,7 +147,7 @@ struct server_slot { int32_t n_prompt_tokens = 0; int32_t n_prompt_tokens_processed = 0; - std::string prompt; + json prompt; // can be either a string, array of strings or array of token ids // when a task is submitted, we first tokenize the prompt and store it here std::vector prompt_tokens; @@ -822,8 +822,13 @@ struct server_context { continue; } + // skip the slot if it does not contains prompt + if (!slot.prompt.is_string()) { + continue; + } + // current slot's prompt - std::string slot_prompt = slot.prompt; + std::string slot_prompt = slot.prompt.get(); // length of the current slot's prompt int slot_prompt_len = slot_prompt.size(); @@ -957,12 +962,12 @@ struct server_context { return false; } - if (prompt->is_string()) { - slot.prompt = prompt->get(); - } else if (prompt->is_array() && prompt->size() == 1 && prompt->at(0).is_string()) { - slot.prompt = prompt->at(0).get(); + if ((prompt->is_string()) || + (prompt->is_array() && prompt->size() == 1 && prompt->at(0).is_string()) || + (prompt->is_array() && !prompt->empty() && prompt->at(0).is_number_integer())) { + slot.prompt = *prompt; } else { - send_error(task, "\"prompt\" must be a string or an array of strings", ERROR_TYPE_INVALID_REQUEST); + send_error(task, "\"prompt\" must be a string or an array of integers", ERROR_TYPE_INVALID_REQUEST); return false; } } From bfaa676b0841617d4ef3596e63aca6be1a8eb1b5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 12 Jun 2024 15:24:20 +0300 Subject: [PATCH 15/18] ggml : improve ggml_is_contiguous logic (#7856) * ggml : improve ggml_is_contiguous logic ggml-ci * ggml : support more contiguous cases ggml-ci --- ggml.c | 75 +++++++++++++++++++++++++++------------------------------- 1 file changed, 35 insertions(+), 40 
deletions(-) diff --git a/ggml.c b/ggml.c index 1fc77743b..5fb9e9a32 100644 --- a/ggml.c +++ b/ggml.c @@ -3212,35 +3212,42 @@ GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor) { return tensor->nb[0] > tensor->nb[1]; } -GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor) { - static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); +static bool ggml_is_contiguous_n(const struct ggml_tensor * tensor, int n) { + size_t next_nb = ggml_type_size(tensor->type); + if (tensor->ne[0] != ggml_blck_size(tensor->type) && tensor->nb[0] != next_nb) { + return false; + } + next_nb *= tensor->ne[0]/ggml_blck_size(tensor->type); + for (int i = 1; i < GGML_MAX_DIMS; i++) { + if (tensor->ne[i] != 1) { + if (i > n) { + if (tensor->nb[i] != next_nb) { + return false; + } + next_nb *= tensor->ne[i]; + } else { + // this dimension does not need to be contiguous + next_nb = tensor->ne[i]*tensor->nb[i]; + } + } + } + return true; +} - return - tensor->nb[0] == ggml_type_size(tensor->type) && - tensor->nb[1] == (tensor->nb[0]*tensor->ne[0])/ggml_blck_size(tensor->type) && - tensor->nb[2] == tensor->nb[1]*tensor->ne[1] && - tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; +GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor) { + return ggml_is_contiguous_0(tensor); } GGML_CALL bool ggml_is_contiguous_0(const struct ggml_tensor * tensor) { - return ggml_is_contiguous(tensor); + return ggml_is_contiguous_n(tensor, 0); } GGML_CALL bool ggml_is_contiguous_1(const struct ggml_tensor * tensor) { - static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); - - return - tensor->nb[0] == ggml_type_size(tensor->type) && - tensor->nb[2] == tensor->nb[1]*tensor->ne[1] && - tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; + return ggml_is_contiguous_n(tensor, 1); } GGML_CALL bool ggml_is_contiguous_2(const struct ggml_tensor * tensor) { - static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); - - return - tensor->nb[0] == ggml_type_size(tensor->type) && - tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; + return ggml_is_contiguous_n(tensor, 2); } GGML_CALL bool ggml_is_permuted(const struct ggml_tensor * tensor) { @@ -3272,20 +3279,20 @@ bool ggml_are_same_shape(const struct ggml_tensor * t0, const struct ggml_tensor static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return - (t0->ne[0] == t1->ne[0] ) && - (t0->ne[1] == t1->ne[1] ) && - (t0->ne[2] == t1->ne[2] ) && - (t0->ne[3] == t1->ne[3] ); + (t0->ne[0] == t1->ne[0]) && + (t0->ne[1] == t1->ne[1]) && + (t0->ne[2] == t1->ne[2]) && + (t0->ne[3] == t1->ne[3]); } bool ggml_are_same_stride(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return - (t0->nb[0] == t1->nb[0] ) && - (t0->nb[1] == t1->nb[1] ) && - (t0->nb[2] == t1->nb[2] ) && - (t0->nb[3] == t1->nb[3] ); + (t0->nb[0] == t1->nb[0]) && + (t0->nb[1] == t1->nb[1]) && + (t0->nb[2] == t1->nb[2]) && + (t0->nb[3] == t1->nb[3]); } // check if t1 can be represented as a repeatition of t0 @@ -4078,32 +4085,26 @@ float ggml_get_f32_1d(const struct ggml_tensor * tensor, int i) { switch (tensor->type) { case GGML_TYPE_I8: { - GGML_ASSERT(tensor->nb[0] == sizeof(int8_t)); return ((int8_t *)(tensor->data))[i]; } case GGML_TYPE_I16: { - GGML_ASSERT(tensor->nb[0] == sizeof(int16_t)); return ((int16_t *)(tensor->data))[i]; } case GGML_TYPE_I32: { - GGML_ASSERT(tensor->nb[0] == 
sizeof(int32_t)); return ((int32_t *)(tensor->data))[i]; } case GGML_TYPE_F16: { - GGML_ASSERT(tensor->nb[0] == sizeof(ggml_fp16_t)); return GGML_FP16_TO_FP32(((ggml_fp16_t *)(tensor->data))[i]); } case GGML_TYPE_BF16: { - GGML_ASSERT(tensor->nb[0] == sizeof(ggml_bf16_t)); return GGML_BF16_TO_FP32(((ggml_bf16_t *)(tensor->data))[i]); } case GGML_TYPE_F32: { - GGML_ASSERT(tensor->nb[0] == sizeof(float)); return ((float *)(tensor->data))[i]; } default: @@ -4125,32 +4126,26 @@ void ggml_set_f32_1d(const struct ggml_tensor * tensor, int i, float value) { switch (tensor->type) { case GGML_TYPE_I8: { - GGML_ASSERT(tensor->nb[0] == sizeof(int8_t)); ((int8_t *)(tensor->data))[i] = value; } break; case GGML_TYPE_I16: { - GGML_ASSERT(tensor->nb[0] == sizeof(int16_t)); ((int16_t *)(tensor->data))[i] = value; } break; case GGML_TYPE_I32: { - GGML_ASSERT(tensor->nb[0] == sizeof(int32_t)); ((int32_t *)(tensor->data))[i] = value; } break; case GGML_TYPE_F16: { - GGML_ASSERT(tensor->nb[0] == sizeof(ggml_fp16_t)); ((ggml_fp16_t *)(tensor->data))[i] = GGML_FP32_TO_FP16(value); } break; case GGML_TYPE_BF16: { - GGML_ASSERT(tensor->nb[0] == sizeof(ggml_bf16_t)); ((ggml_bf16_t *)(tensor->data))[i] = GGML_FP32_TO_BF16(value); } break; case GGML_TYPE_F32: { - GGML_ASSERT(tensor->nb[0] == sizeof(float)); ((float *)(tensor->data))[i] = value; } break; default: @@ -7343,7 +7338,7 @@ struct ggml_tensor * ggml_add_rel_pos_inplace( return ggml_add_rel_pos_impl(ctx, a, pw, ph, true); } -// gmml_unary +// ggml_unary static struct ggml_tensor * ggml_unary_impl( struct ggml_context * ctx, From a9cae48003dfc4fe95b8f5c81682fc6e63425235 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 12 Jun 2024 16:00:22 +0300 Subject: [PATCH 16/18] tests : add non-cont unary tests (#7857) * tests : add non-cont unary tests * ggml : update unary asserts and "supports_op" ggml-ci --- ggml-cuda.cu | 2 +- ggml-cuda/unary.cu | 20 ++++++++ ggml-kompute.cpp | 2 +- ggml-metal.m | 2 +- ggml-sycl.cpp | 2 +- ggml-vulkan.cpp | 2 +- ggml.c | 97 ++++++++++++++++++-------------------- tests/test-backend-ops.cpp | 29 ++++++++---- 8 files changed, 90 insertions(+), 66 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index af10f21a0..c6bc3f64c 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -2740,7 +2740,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_UNARY_OP_HARDSWISH: case GGML_UNARY_OP_GELU_QUICK: case GGML_UNARY_OP_TANH: - return true; + return ggml_is_contiguous(op->src[0]); default: return false; } diff --git a/ggml-cuda/unary.cu b/ggml-cuda/unary.cu index ac03d5c6f..a5ff96320 100644 --- a/ggml-cuda/unary.cu +++ b/ggml-cuda/unary.cu @@ -148,6 +148,8 @@ void ggml_cuda_op_gelu(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -160,6 +162,8 @@ void ggml_cuda_op_silu(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -172,6 +176,8 @@ void ggml_cuda_op_gelu_quick(ggml_backend_cuda_context & ctx, ggml_tensor * dst) float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type 
== GGML_TYPE_F32); @@ -184,6 +190,8 @@ void ggml_cuda_op_tanh(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -196,6 +204,8 @@ void ggml_cuda_op_relu(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -208,6 +218,8 @@ void ggml_cuda_op_sigmoid(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -220,6 +232,8 @@ void ggml_cuda_op_hardsigmoid(ggml_backend_cuda_context & ctx, ggml_tensor * dst float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -232,6 +246,8 @@ void ggml_cuda_op_hardswish(ggml_backend_cuda_context & ctx, ggml_tensor * dst) float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -244,6 +260,8 @@ void ggml_cuda_op_leaky_relu(ggml_backend_cuda_context & ctx, ggml_tensor * dst) float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -259,6 +277,8 @@ void ggml_cuda_op_sqr(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); diff --git a/ggml-kompute.cpp b/ggml-kompute.cpp index 5592741be..18c6f4a10 100644 --- a/ggml-kompute.cpp +++ b/ggml-kompute.cpp @@ -1340,7 +1340,7 @@ static bool ggml_vk_supports_op(const struct ggml_tensor * op) { case GGML_UNARY_OP_RELU: case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_SILU: - return true; + return ggml_is_contiguous(op->src[0]); default: ; } diff --git a/ggml-metal.m b/ggml-metal.m index 946f11813..b5c287347 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -744,7 +744,7 @@ static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_GELU_QUICK: case GGML_UNARY_OP_SILU: - return true; + return ggml_is_contiguous(op->src[0]); default: return false; } diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 42fc0df20..e7d260bd4 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -17190,7 +17190,7 @@ GGML_CALL static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, cons case GGML_UNARY_OP_HARDSWISH: case GGML_UNARY_OP_GELU_QUICK: case GGML_UNARY_OP_TANH: - return true; + return ggml_is_contiguous(op->src[0]); default: return false; } diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 06ba23313..5b9280491 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -6439,7 +6439,7 @@ GGML_CALL static bool ggml_backend_vk_supports_op(ggml_backend_t backend, const case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_RELU: - return true; + return 
ggml_is_contiguous(op->src[0]); default: return false; } diff --git a/ggml.c b/ggml.c index 5fb9e9a32..2ea1d7677 100644 --- a/ggml.c +++ b/ggml.c @@ -7345,6 +7345,8 @@ static struct ggml_tensor * ggml_unary_impl( struct ggml_tensor * a, enum ggml_unary_op op, bool inplace) { + GGML_ASSERT(ggml_is_contiguous_1(a)); + bool is_node = false; if (!inplace && (a->grad)) { @@ -11009,6 +11011,8 @@ static void ggml_compute_forward_abs_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11018,9 +11022,6 @@ static void ggml_compute_forward_abs_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_abs_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11055,6 +11056,8 @@ static void ggml_compute_forward_sgn_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11064,9 +11067,6 @@ static void ggml_compute_forward_sgn_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_sgn_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11101,6 +11101,8 @@ static void ggml_compute_forward_neg_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11110,9 +11112,6 @@ static void ggml_compute_forward_neg_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_neg_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11147,6 +11146,8 @@ static void ggml_compute_forward_step_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11156,9 +11157,6 @@ static void ggml_compute_forward_step_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_step_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11193,6 +11191,8 @@ static void ggml_compute_forward_tanh_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11202,9 +11202,6 @@ static void ggml_compute_forward_tanh_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { 
ggml_vec_tanh_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11239,6 +11236,8 @@ static void ggml_compute_forward_elu_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11248,9 +11247,6 @@ static void ggml_compute_forward_elu_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_elu_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11285,6 +11281,8 @@ static void ggml_compute_forward_relu_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11294,9 +11292,6 @@ static void ggml_compute_forward_relu_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_relu_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11331,6 +11326,8 @@ static void ggml_compute_forward_sigmoid_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11340,9 +11337,6 @@ static void ggml_compute_forward_sigmoid_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_sigmoid_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11376,9 +11370,9 @@ static void ggml_compute_forward_gelu_f32( const struct ggml_tensor * src0 = dst->src[0]; - GGML_ASSERT(ggml_is_contiguous_1(src0)); - GGML_ASSERT(ggml_is_contiguous_1(dst)); - GGML_ASSERT(ggml_are_same_shape(src0, dst)); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; @@ -11439,9 +11433,9 @@ static void ggml_compute_forward_gelu_quick_f32( const struct ggml_tensor * src0 = dst->src[0]; - GGML_ASSERT(ggml_is_contiguous_1(src0)); - GGML_ASSERT(ggml_is_contiguous_1(dst)); - GGML_ASSERT(ggml_are_same_shape(src0, dst)); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; @@ -11502,9 +11496,9 @@ static void ggml_compute_forward_silu_f32( const struct ggml_tensor * src0 = dst->src[0]; - GGML_ASSERT(ggml_is_contiguous_1(src0)); - GGML_ASSERT(ggml_is_contiguous_1(dst)); - GGML_ASSERT(ggml_are_same_shape(src0, dst)); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; @@ -11565,6 +11559,8 @@ static void ggml_compute_forward_leaky_relu_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); 
+ assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11614,11 +11610,11 @@ static void ggml_compute_forward_silu_back_f32( const struct ggml_tensor * src0 = dst->src[0]; const struct ggml_tensor * grad = dst->src[1]; - GGML_ASSERT(ggml_is_contiguous_1(grad)); - GGML_ASSERT(ggml_is_contiguous_1(src0)); - GGML_ASSERT(ggml_is_contiguous_1(dst)); - GGML_ASSERT(ggml_are_same_shape(src0, dst)); - GGML_ASSERT(ggml_are_same_shape(src0, grad)); + assert(ggml_is_contiguous_1(grad)); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, dst)); + assert(ggml_are_same_shape(src0, grad)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; @@ -11680,6 +11676,8 @@ static void ggml_compute_forward_hardswish_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11689,9 +11687,6 @@ static void ggml_compute_forward_hardswish_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_hardswish_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -11723,6 +11718,8 @@ static void ggml_compute_forward_hardsigmoid_f32( const struct ggml_tensor * src0 = dst->src[0]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -11732,9 +11729,6 @@ static void ggml_compute_forward_hardsigmoid_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert(dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { ggml_vec_hardsigmoid_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -16681,7 +16675,10 @@ static void ggml_compute_forward_map_unary_f32( const struct ggml_tensor * src0 = dst->src[0]; - GGML_ASSERT(ggml_are_same_shape(src0, dst)); + assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; @@ -16690,9 +16687,6 @@ static void ggml_compute_forward_map_unary_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert( dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { fun(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), @@ -16730,6 +16724,9 @@ static void ggml_compute_forward_map_binary_f32( const struct ggml_tensor * src1 = dst->src[1]; assert(params->ith == 0); + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(src1)); + assert(ggml_is_contiguous_1(dst)); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { @@ -16739,10 +16736,6 @@ static void ggml_compute_forward_map_binary_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; - assert( dst->nb[0] == sizeof(float)); - assert(src0->nb[0] == 
sizeof(float)); - assert(src1->nb[0] == sizeof(float)); - for (int i = 0; i < n; i++) { fun(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index ce406a8af..2b48e623e 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -642,20 +642,29 @@ struct test_case { struct test_unary : public test_case { const ggml_unary_op op; const ggml_type type; - const std::array ne; + const std::array ne_a; + int v; // view (1 : non-contiguous a) std::string vars() override { - return VARS_TO_STR2(type, ne); + return VARS_TO_STR3(type, ne_a, v); } test_unary(ggml_unary_op op, ggml_type type = GGML_TYPE_F32, - std::array ne = {128, 10, 10, 10}) - : op(op), type(type), ne(ne) {} + std::array ne_a = {128, 10, 10, 10}, + int v = 0) + : op(op), type(type), ne_a(ne_a), v(v) {} ggml_tensor * build_graph(ggml_context * ctx) override { - ggml_tensor * in = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * out = ggml_unary(ctx, in, op); + ggml_tensor * a; + if (v & 1) { + auto ne = ne_a; ne[0] *= 3; + a = ggml_new_tensor(ctx, type, 4, ne.data()); + a = ggml_view_4d(ctx, a, ne_a[0], ne_a[1], ne_a[2], ne_a[3], a->nb[1], a->nb[2], a->nb[3], 0); + } else { + a = ggml_new_tensor(ctx, type, 4, ne_a.data()); + } + ggml_tensor * out = ggml_unary(ctx, a, op); return out; } @@ -2016,9 +2025,11 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op }; // unary ops - for (int op = 0; op < GGML_UNARY_OP_COUNT; op++) { - test_cases.emplace_back(new test_unary((ggml_unary_op) op)); - test_cases.emplace_back(new test_unary((ggml_unary_op) op, GGML_TYPE_F32, { 7, 13, 19, 23 })); + for (int v : {0, 1}) { + for (int op = 0; op < GGML_UNARY_OP_COUNT; op++) { + test_cases.emplace_back(new test_unary((ggml_unary_op) op, GGML_TYPE_F32, { 128, 10, 10, 10 }, v)); + test_cases.emplace_back(new test_unary((ggml_unary_op) op, GGML_TYPE_F32, { 7, 13, 19, 23 }, v)); + } } test_cases.emplace_back(new test_get_rows(GGML_TYPE_F32, 1, 8, 2, 1, false)); From 963552903f51043ee947a8deeaaa7ec00bc3f1a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 12 Jun 2024 17:41:51 +0200 Subject: [PATCH 17/18] CUDA: fix broken oob check for FA vec f32 kernel (#7904) --- ggml-cuda/fattn-vec-f32.cuh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda/fattn-vec-f32.cuh b/ggml-cuda/fattn-vec-f32.cuh index ddf0c8374..11a5e355f 100644 --- a/ggml-cuda/fattn-vec-f32.cuh +++ b/ggml-cuda/fattn-vec-f32.cuh @@ -149,7 +149,7 @@ static __global__ void flash_attn_vec_ext_f32( for (int i0 = 0; i0 < D/2; i0 += WARP_SIZE) { const int i = i0 + threadIdx.x; - Q_f2[j][i0/WARP_SIZE] = ncols <= 2 || ic0 + j ? Q_f2_j[i] : make_float2(0.0f, 0.0f); + Q_f2[j][i0/WARP_SIZE] = ncols <= 2 || ic0 + j < ne01 ? 
Q_f2_j[i] : make_float2(0.0f, 0.0f); Q_f2[j][i0/WARP_SIZE].x *= scale; Q_f2[j][i0/WARP_SIZE].y *= scale; } From 1c641e6aac5c18b964e7b32d9dbbb4bf5301d0d7 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Thu, 13 Jun 2024 00:41:52 +0100 Subject: [PATCH 18/18] =?UTF-8?q?`build`:=20rename=20main=20=E2=86=92=20ll?= =?UTF-8?q?ama-cli,=20server=20=E2=86=92=20llama-server,=20llava-cli=20?= =?UTF-8?q?=E2=86=92=20llama-llava-cli,=20etc...=20(#7809)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * `main`/`server`: rename to `llama` / `llama-server` for consistency w/ homebrew * server: update refs -> llama-server gitignore llama-server * server: simplify nix package * main: update refs -> llama fix examples/main ref * main/server: fix targets * update more names * Update build.yml * rm accidentally checked in bins * update straggling refs * Update .gitignore * Update server-llm.sh * main: target name -> llama-cli * Prefix all example bins w/ llama- * fix main refs * rename {main->llama}-cmake-pkg binary * prefix more cmake targets w/ llama- * add/fix gbnf-validator subfolder to cmake * sort cmake example subdirs * rm bin files * fix llama-lookup-* Makefile rules * gitignore /llama-* * rename Dockerfiles * rename llama|main -> llama-cli; consistent RPM bin prefixes * fix some missing -cli suffixes * rename dockerfile w/ llama-cli * rename(make): llama-baby-llama * update dockerfile refs * more llama-cli(.exe) * fix test-eval-callback * rename: llama-cli-cmake-pkg(.exe) * address gbnf-validator unused fread warning (switched to C++ / ifstream) * add two missing llama- prefixes * Updating docs for eval-callback binary to use new `llama-` prefix. * Updating a few lingering doc references for rename of main to llama-cli * Updating `run-with-preset.py` to use new binary names. Updating docs around `perplexity` binary rename. * Updating documentation references for lookup-merge and export-lora * Updating two small `main` references missed earlier in the finetune docs. * Update apps.nix * update grammar/README.md w/ new llama-* names * update llama-rpc-server bin name + doc * Revert "update llama-rpc-server bin name + doc" This reverts commit e474ef1df481fd8936cd7d098e3065d7de378930. 
* add hot topic notice to README.md * Update README.md * Update README.md * rename gguf-split & quantize bins refs in **/tests.sh --------- Co-authored-by: HanClinto --- .devops/cloud-v-pipeline | 2 +- ...a.Dockerfile => llama-cli-cuda.Dockerfile} | 6 +- ....Dockerfile => llama-cli-intel.Dockerfile} | 6 +- ...m.Dockerfile => llama-cli-rocm.Dockerfile} | 4 +- ...Dockerfile => llama-cli-vulkan.Dockerfile} | 6 +- .../{main.Dockerfile => llama-cli.Dockerfile} | 6 +- .devops/llama-cpp-clblast.srpm.spec | 14 +- .devops/llama-cpp-cuda.srpm.spec | 14 +- .devops/llama-cpp.srpm.spec | 14 +- ...ockerfile => llama-server-cuda.Dockerfile} | 6 +- ...ckerfile => llama-server-intel.Dockerfile} | 6 +- ...ockerfile => llama-server-rocm.Dockerfile} | 4 +- ...kerfile => llama-server-vulkan.Dockerfile} | 6 +- ...ver.Dockerfile => llama-server.Dockerfile} | 6 +- .devops/nix/apps.nix | 6 +- .devops/nix/package.nix | 4 +- .devops/tools.sh | 10 +- .dockerignore | 4 +- .github/ISSUE_TEMPLATE/01-bug-low.yml | 2 +- .github/ISSUE_TEMPLATE/02-bug-medium.yml | 2 +- .github/ISSUE_TEMPLATE/03-bug-high.yml | 2 +- .github/ISSUE_TEMPLATE/04-bug-critical.yml | 2 +- .github/workflows/bench.yml | 2 +- .github/workflows/build.yml | 10 +- .github/workflows/docker.yml | 16 +- .github/workflows/server.yml | 4 +- .gitignore | 43 +--- Makefile | 138 +++++++---- README-sycl.md | 18 +- README.md | 33 +-- ci/run.sh | 224 +++++++++--------- docs/HOWTO-add-model.md | 2 +- docs/token_generation_performance_tips.md | 4 +- examples/CMakeLists.txt | 47 ++-- examples/Miku.sh | 2 +- examples/baby-llama/CMakeLists.txt | 2 +- examples/base-translate.sh | 2 +- examples/batched-bench/CMakeLists.txt | 2 +- examples/batched-bench/README.md | 8 +- examples/batched.swift/Makefile | 6 +- examples/batched.swift/Package.swift | 4 +- examples/batched.swift/README.md | 2 +- examples/batched/CMakeLists.txt | 2 +- examples/batched/README.md | 2 +- examples/benchmark/CMakeLists.txt | 2 +- examples/chat-13B.sh | 2 +- examples/chat-persistent.sh | 10 +- examples/chat-vicuna.sh | 2 +- examples/chat.sh | 2 +- .../convert-llama2c-to-ggml/CMakeLists.txt | 2 +- examples/convert-llama2c-to-ggml/README.md | 6 +- examples/embedding/CMakeLists.txt | 2 +- examples/embedding/README.md | 4 +- examples/eval-callback/CMakeLists.txt | 4 +- examples/eval-callback/README.md | 2 +- examples/export-lora/CMakeLists.txt | 2 +- examples/export-lora/README.md | 4 +- examples/finetune/CMakeLists.txt | 2 +- examples/finetune/README.md | 12 +- examples/finetune/finetune.sh | 2 +- examples/gbnf-validator/CMakeLists.txt | 4 +- examples/gbnf-validator/gbnf-validator.cpp | 36 ++- examples/gguf-split/CMakeLists.txt | 2 +- examples/gguf-split/tests.sh | 4 +- examples/gguf/CMakeLists.txt | 2 +- examples/gritlm/CMakeLists.txt | 2 +- examples/gritlm/README.md | 2 +- examples/imatrix/CMakeLists.txt | 2 +- examples/imatrix/README.md | 6 +- examples/infill/CMakeLists.txt | 2 +- examples/infill/README.md | 2 +- examples/jeopardy/jeopardy.sh | 2 +- examples/json-schema-pydantic-example.py | 2 +- examples/json_schema_to_grammar.py | 2 +- examples/llama-bench/README.md | 2 +- examples/llava/CMakeLists.txt | 11 +- examples/llava/MobileVLM-README.md | 18 +- examples/llava/README.md | 10 +- examples/llava/android/adb_run.sh | 2 +- examples/lookahead/CMakeLists.txt | 2 +- examples/lookup/CMakeLists.txt | 8 +- examples/lookup/lookup-merge.cpp | 8 +- examples/main-cmake-pkg/CMakeLists.txt | 8 +- examples/main-cmake-pkg/README.md | 4 +- examples/main/CMakeLists.txt | 2 +- examples/main/README.md | 24 +- 
examples/parallel/CMakeLists.txt | 2 +- examples/passkey/CMakeLists.txt | 2 +- examples/passkey/README.md | 2 +- examples/perplexity/CMakeLists.txt | 2 +- examples/perplexity/perplexity.cpp | 2 +- examples/quantize-stats/CMakeLists.txt | 2 +- examples/quantize/CMakeLists.txt | 2 +- examples/quantize/tests.sh | 6 +- examples/reason-act.sh | 2 +- examples/retrieval/CMakeLists.txt | 2 +- examples/retrieval/README.md | 2 +- examples/rpc/README.md | 2 +- examples/save-load-state/CMakeLists.txt | 2 +- examples/server-llama2-13B.sh | 2 +- examples/server/CMakeLists.txt | 2 +- examples/server/README.md | 22 +- examples/server/bench/README.md | 2 +- examples/server/bench/bench.py | 2 +- examples/server/public_simplechat/readme.md | 4 +- examples/server/tests/README.md | 8 +- examples/server/tests/features/steps/steps.py | 4 +- examples/simple/CMakeLists.txt | 2 +- examples/speculative/CMakeLists.txt | 2 +- examples/sycl/CMakeLists.txt | 2 +- examples/sycl/README.md | 6 +- examples/sycl/run-llama2.sh | 8 +- examples/tokenize/CMakeLists.txt | 2 +- .../train-text-from-scratch/CMakeLists.txt | 2 +- examples/train-text-from-scratch/README.md | 4 +- flake.nix | 2 +- grammars/README.md | 14 +- pocs/vdot/CMakeLists.txt | 4 +- scripts/get-hellaswag.sh | 2 +- scripts/get-wikitext-103.sh | 2 +- scripts/get-wikitext-2.sh | 2 +- scripts/get-winogrande.sh | 2 +- scripts/hf.sh | 6 +- scripts/pod-llama.sh | 56 ++--- scripts/qnt-all.sh | 2 +- scripts/run-all-ppl.sh | 2 +- scripts/run-with-preset.py | 16 +- scripts/server-llm.sh | 8 +- 128 files changed, 578 insertions(+), 578 deletions(-) rename .devops/{main-cuda.Dockerfile => llama-cli-cuda.Dockerfile} (88%) rename .devops/{main-intel.Dockerfile => llama-cli-intel.Dockerfile} (78%) rename .devops/{main-rocm.Dockerfile => llama-cli-rocm.Dockerfile} (94%) rename .devops/{main-vulkan.Dockerfile => llama-cli-vulkan.Dockerfile} (81%) rename .devops/{main.Dockerfile => llama-cli.Dockerfile} (72%) rename .devops/{server-cuda.Dockerfile => llama-server-cuda.Dockerfile} (88%) rename .devops/{server-intel.Dockerfile => llama-server-intel.Dockerfile} (80%) rename .devops/{server-rocm.Dockerfile => llama-server-rocm.Dockerfile} (94%) rename .devops/{server-vulkan.Dockerfile => llama-server-vulkan.Dockerfile} (82%) rename .devops/{server.Dockerfile => llama-server.Dockerfile} (74%) diff --git a/.devops/cloud-v-pipeline b/.devops/cloud-v-pipeline index f3a4944f8..af8c0cea6 100644 --- a/.devops/cloud-v-pipeline +++ b/.devops/cloud-v-pipeline @@ -15,7 +15,7 @@ node('x86_runner1'){ // Running on x86 runner containing latest vecto stage('Running llama.cpp'){ sh'''#!/bin/bash module load gnu-bin2/0.1 # loading latest versions of vector qemu and vector gcc - qemu-riscv64 -L /softwares/gnu-bin2/sysroot -cpu rv64,v=true,vlen=256,elen=64,vext_spec=v1.0 ./main -m /home/alitariq/codellama-7b.Q4_K_M.gguf -p "Anything" -n 9 > llama_log.txt # Running llama.cpp on vector qemu-riscv64 + qemu-riscv64 -L /softwares/gnu-bin2/sysroot -cpu rv64,v=true,vlen=256,elen=64,vext_spec=v1.0 ./llama-cli -m /home/alitariq/codellama-7b.Q4_K_M.gguf -p "Anything" -n 9 > llama_log.txt # Running llama.cpp on vector qemu-riscv64 cat llama_log.txt # Printing results ''' } diff --git a/.devops/main-cuda.Dockerfile b/.devops/llama-cli-cuda.Dockerfile similarity index 88% rename from .devops/main-cuda.Dockerfile rename to .devops/llama-cli-cuda.Dockerfile index 2aec4a85d..d5ce538f6 100644 --- a/.devops/main-cuda.Dockerfile +++ b/.devops/llama-cli-cuda.Dockerfile @@ -23,13 +23,13 @@ ENV 
CUDA_DOCKER_ARCH=${CUDA_DOCKER_ARCH} # Enable CUDA ENV LLAMA_CUDA=1 -RUN make -j$(nproc) main +RUN make -j$(nproc) llama-cli FROM ${BASE_CUDA_RUN_CONTAINER} as runtime RUN apt-get update && \ apt-get install -y libgomp1 -COPY --from=build /app/main /main +COPY --from=build /app/llama-cli /llama-cli -ENTRYPOINT [ "/main" ] +ENTRYPOINT [ "/llama-cli" ] diff --git a/.devops/main-intel.Dockerfile b/.devops/llama-cli-intel.Dockerfile similarity index 78% rename from .devops/main-intel.Dockerfile rename to .devops/llama-cli-intel.Dockerfile index b7992f47b..6789e17af 100644 --- a/.devops/main-intel.Dockerfile +++ b/.devops/llama-cli-intel.Dockerfile @@ -15,12 +15,12 @@ RUN if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ export OPT_SYCL_F16="-DLLAMA_SYCL_F16=ON"; \ fi && \ cmake -B build -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx ${OPT_SYCL_F16} && \ - cmake --build build --config Release --target main + cmake --build build --config Release --target llama-cli FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime -COPY --from=build /app/build/bin/main /main +COPY --from=build /app/build/bin/llama-cli /llama-cli ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/main" ] +ENTRYPOINT [ "/llama-cli" ] diff --git a/.devops/main-rocm.Dockerfile b/.devops/llama-cli-rocm.Dockerfile similarity index 94% rename from .devops/main-rocm.Dockerfile rename to .devops/llama-cli-rocm.Dockerfile index dcaeb3e72..7e8a6f0fa 100644 --- a/.devops/main-rocm.Dockerfile +++ b/.devops/llama-cli-rocm.Dockerfile @@ -40,6 +40,6 @@ ENV LLAMA_HIPBLAS=1 ENV CC=/opt/rocm/llvm/bin/clang ENV CXX=/opt/rocm/llvm/bin/clang++ -RUN make -j$(nproc) main +RUN make -j$(nproc) llama-cli -ENTRYPOINT [ "/app/main" ] +ENTRYPOINT [ "/app/llama-cli" ] diff --git a/.devops/main-vulkan.Dockerfile b/.devops/llama-cli-vulkan.Dockerfile similarity index 81% rename from .devops/main-vulkan.Dockerfile rename to .devops/llama-cli-vulkan.Dockerfile index 1bdb52803..7a0abe71f 100644 --- a/.devops/main-vulkan.Dockerfile +++ b/.devops/llama-cli-vulkan.Dockerfile @@ -15,13 +15,13 @@ RUN wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key WORKDIR /app COPY . . RUN cmake -B build -DLLAMA_VULKAN=1 && \ - cmake --build build --config Release --target main + cmake --build build --config Release --target llama-cli # Clean up WORKDIR / -RUN cp /app/build/bin/main /main && \ +RUN cp /app/build/bin/llama-cli /llama-cli && \ rm -rf /app ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/main" ] +ENTRYPOINT [ "/llama-cli" ] diff --git a/.devops/main.Dockerfile b/.devops/llama-cli.Dockerfile similarity index 72% rename from .devops/main.Dockerfile rename to .devops/llama-cli.Dockerfile index d2514c4ba..38382bfc9 100644 --- a/.devops/main.Dockerfile +++ b/.devops/llama-cli.Dockerfile @@ -9,15 +9,15 @@ WORKDIR /app COPY . . 
-RUN make -j$(nproc) main +RUN make -j$(nproc) llama-cli FROM ubuntu:$UBUNTU_VERSION as runtime RUN apt-get update && \ apt-get install -y libgomp1 -COPY --from=build /app/main /main +COPY --from=build /app/llama-cli /llama-cli ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/main" ] +ENTRYPOINT [ "/llama-cli" ] diff --git a/.devops/llama-cpp-clblast.srpm.spec b/.devops/llama-cpp-clblast.srpm.spec index 774f63ddd..013952191 100644 --- a/.devops/llama-cpp-clblast.srpm.spec +++ b/.devops/llama-cpp-clblast.srpm.spec @@ -36,9 +36,9 @@ make -j LLAMA_CLBLAST=1 %install mkdir -p %{buildroot}%{_bindir}/ -cp -p main %{buildroot}%{_bindir}/llamaclblast -cp -p server %{buildroot}%{_bindir}/llamaclblastserver -cp -p simple %{buildroot}%{_bindir}/llamaclblastsimple +cp -p llama-cli %{buildroot}%{_bindir}/llama-clblast-cli +cp -p llama-server %{buildroot}%{_bindir}/llama-clblast-server +cp -p llama-simple %{buildroot}%{_bindir}/llama-clblast-simple mkdir -p %{buildroot}/usr/lib/systemd/system %{__cat} <<EOF > %{buildroot}/usr/lib/systemd/system/llamaclblast.service @@ -49,7 +49,7 @@ After=syslog.target network.target local-fs.target remote-fs.target nss-lookup.t [Service] Type=simple EnvironmentFile=/etc/sysconfig/llama -ExecStart=/usr/bin/llamaclblastserver $LLAMA_ARGS +ExecStart=/usr/bin/llama-clblast-server $LLAMA_ARGS ExecReload=/bin/kill -s HUP $MAINPID Restart=never @@ -67,9 +67,9 @@ rm -rf %{buildroot} rm -rf %{_builddir}/* %files -%{_bindir}/llamaclblast -%{_bindir}/llamaclblastserver -%{_bindir}/llamaclblastsimple +%{_bindir}/llama-clblast-cli +%{_bindir}/llama-clblast-server +%{_bindir}/llama-clblast-simple /usr/lib/systemd/system/llamaclblast.service %config /etc/sysconfig/llama diff --git a/.devops/llama-cpp-cuda.srpm.spec b/.devops/llama-cpp-cuda.srpm.spec index ba9cb7cbb..cbdf43626 100644 --- a/.devops/llama-cpp-cuda.srpm.spec +++ b/.devops/llama-cpp-cuda.srpm.spec @@ -36,9 +36,9 @@ make -j LLAMA_CUDA=1 %install mkdir -p %{buildroot}%{_bindir}/ -cp -p main %{buildroot}%{_bindir}/llamacppcuda -cp -p server %{buildroot}%{_bindir}/llamacppcudaserver -cp -p simple %{buildroot}%{_bindir}/llamacppcudasimple +cp -p llama-cli %{buildroot}%{_bindir}/llama-cuda-cli +cp -p llama-server %{buildroot}%{_bindir}/llama-cuda-server +cp -p llama-simple %{buildroot}%{_bindir}/llama-cuda-simple mkdir -p %{buildroot}/usr/lib/systemd/system %{__cat} <<EOF > %{buildroot}/usr/lib/systemd/system/llamacuda.service @@ -49,7 +49,7 @@ After=syslog.target network.target local-fs.target remote-fs.target nss-lookup.t [Service] Type=simple EnvironmentFile=/etc/sysconfig/llama -ExecStart=/usr/bin/llamacppcudaserver $LLAMA_ARGS +ExecStart=/usr/bin/llama-cuda-server $LLAMA_ARGS ExecReload=/bin/kill -s HUP $MAINPID Restart=never @@ -67,9 +67,9 @@ rm -rf %{buildroot} rm -rf %{_builddir}/* %files -%{_bindir}/llamacppcuda -%{_bindir}/llamacppcudaserver -%{_bindir}/llamacppcudasimple +%{_bindir}/llama-cuda-cli +%{_bindir}/llama-cuda-server +%{_bindir}/llama-cuda-simple /usr/lib/systemd/system/llamacuda.service %config /etc/sysconfig/llama diff --git a/.devops/llama-cpp.srpm.spec b/.devops/llama-cpp.srpm.spec index 1d9e4f425..4d5560089 100644 --- a/.devops/llama-cpp.srpm.spec +++ b/.devops/llama-cpp.srpm.spec @@ -38,9 +38,9 @@ make -j %install mkdir -p %{buildroot}%{_bindir}/ -cp -p main %{buildroot}%{_bindir}/llama -cp -p server %{buildroot}%{_bindir}/llamaserver -cp -p simple %{buildroot}%{_bindir}/llamasimple +cp -p llama-cli %{buildroot}%{_bindir}/llama-cli +cp -p llama-server %{buildroot}%{_bindir}/llama-server +cp -p llama-simple
%{buildroot}%{_bindir}/llama-simple mkdir -p %{buildroot}/usr/lib/systemd/system %{__cat} <<EOF > %{buildroot}/usr/lib/systemd/system/llama.service @@ -51,7 +51,7 @@ After=syslog.target network.target local-fs.target remote-fs.target nss-lookup.t [Service] Type=simple EnvironmentFile=/etc/sysconfig/llama -ExecStart=/usr/bin/llamaserver $LLAMA_ARGS +ExecStart=/usr/bin/llama-server $LLAMA_ARGS ExecReload=/bin/kill -s HUP $MAINPID Restart=never @@ -69,9 +69,9 @@ rm -rf %{buildroot} rm -rf %{_builddir}/* %files -%{_bindir}/llama -%{_bindir}/llamaserver -%{_bindir}/llamasimple +%{_bindir}/llama-cli +%{_bindir}/llama-server +%{_bindir}/llama-simple /usr/lib/systemd/system/llama.service %config /etc/sysconfig/llama diff --git a/.devops/server-cuda.Dockerfile b/.devops/llama-server-cuda.Dockerfile similarity index 88% rename from .devops/server-cuda.Dockerfile rename to .devops/llama-server-cuda.Dockerfile index 4e9747b82..0010ffd4c 100644 --- a/.devops/server-cuda.Dockerfile +++ b/.devops/llama-server-cuda.Dockerfile @@ -25,13 +25,13 @@ ENV LLAMA_CUDA=1 # Enable cURL ENV LLAMA_CURL=1 -RUN make -j$(nproc) server +RUN make -j$(nproc) llama-server FROM ${BASE_CUDA_RUN_CONTAINER} as runtime RUN apt-get update && \ apt-get install -y libcurl4-openssl-dev libgomp1 -COPY --from=build /app/server /server +COPY --from=build /app/llama-server /llama-server -ENTRYPOINT [ "/server" ] +ENTRYPOINT [ "/llama-server" ] diff --git a/.devops/server-intel.Dockerfile b/.devops/llama-server-intel.Dockerfile similarity index 80% rename from .devops/server-intel.Dockerfile rename to .devops/llama-server-intel.Dockerfile index c5adcb6da..cec436452 100644 --- a/.devops/server-intel.Dockerfile +++ b/.devops/llama-server-intel.Dockerfile @@ -15,15 +15,15 @@ RUN if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ export OPT_SYCL_F16="-DLLAMA_SYCL_F16=ON"; \ fi && \ cmake -B build -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_CURL=ON ${OPT_SYCL_F16} && \ - cmake --build build --config Release --target server + cmake --build build --config Release --target llama-server FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime RUN apt-get update && \ apt-get install -y libcurl4-openssl-dev -COPY --from=build /app/build/bin/server /server +COPY --from=build /app/build/bin/llama-server /llama-server ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/server" ] +ENTRYPOINT [ "/llama-server" ] diff --git a/.devops/server-rocm.Dockerfile b/.devops/llama-server-rocm.Dockerfile similarity index 94% rename from .devops/server-rocm.Dockerfile rename to .devops/llama-server-rocm.Dockerfile index a6b76dee8..f88cf20e5 100644 --- a/.devops/server-rocm.Dockerfile +++ b/.devops/llama-server-rocm.Dockerfile @@ -45,6 +45,6 @@ ENV LLAMA_CURL=1 RUN apt-get update && \ apt-get install -y libcurl4-openssl-dev -RUN make -j$(nproc) +RUN make -j$(nproc) llama-server -ENTRYPOINT [ "/app/server" ] +ENTRYPOINT [ "/app/llama-server" ] diff --git a/.devops/server-vulkan.Dockerfile b/.devops/llama-server-vulkan.Dockerfile similarity index 82% rename from .devops/server-vulkan.Dockerfile rename to .devops/llama-server-vulkan.Dockerfile index 6e757e171..b0fa0b8e6 100644 --- a/.devops/server-vulkan.Dockerfile +++ b/.devops/llama-server-vulkan.Dockerfile @@ -19,13 +19,13 @@ RUN apt-get update && \ WORKDIR /app COPY . .
RUN cmake -B build -DLLAMA_VULKAN=1 -DLLAMA_CURL=1 && \ - cmake --build build --config Release --target server + cmake --build build --config Release --target llama-server # Clean up WORKDIR / -RUN cp /app/build/bin/server /server && \ +RUN cp /app/build/bin/llama-server /llama-server && \ rm -rf /app ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/server" ] +ENTRYPOINT [ "/llama-server" ] diff --git a/.devops/server.Dockerfile b/.devops/llama-server.Dockerfile similarity index 74% rename from .devops/server.Dockerfile rename to .devops/llama-server.Dockerfile index bee63b966..aa93369be 100644 --- a/.devops/server.Dockerfile +++ b/.devops/llama-server.Dockerfile @@ -11,15 +11,15 @@ COPY . . ENV LLAMA_CURL=1 -RUN make -j$(nproc) server +RUN make -j$(nproc) llama-server FROM ubuntu:$UBUNTU_VERSION as runtime RUN apt-get update && \ apt-get install -y libcurl4-openssl-dev libgomp1 -COPY --from=build /app/server /server +COPY --from=build /app/llama-server /llama-server ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/server" ] +ENTRYPOINT [ "/llama-server" ] diff --git a/.devops/nix/apps.nix b/.devops/nix/apps.nix index b8a12cc0a..897fce4d3 100644 --- a/.devops/nix/apps.nix +++ b/.devops/nix/apps.nix @@ -6,11 +6,11 @@ let inherit (config.packages) default; binaries = [ - "llama" + "llama-cli" "llama-embedding" "llama-server" - "quantize" - "train-text-from-scratch" + "llama-quantize" + "llama-train-text-from-scratch" ]; mkApp = name: { type = "app"; diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index e8d5b0bd9..87bb3a20f 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -243,8 +243,6 @@ effectiveStdenv.mkDerivation ( # TODO(SomeoneSerge): It's better to add proper install targets at the CMake level, # if they haven't been added yet. postInstall = '' - mv $out/bin/main${executableSuffix} $out/bin/llama${executableSuffix} - mv $out/bin/server${executableSuffix} $out/bin/llama-server${executableSuffix} mkdir -p $out/include cp $src/llama.h $out/include/ ''; @@ -294,7 +292,7 @@ effectiveStdenv.mkDerivation ( license = lib.licenses.mit; # Accommodates `nix run` and `lib.getExe` - mainProgram = "llama"; + mainProgram = "llama-cli"; # These people might respond, on the best effort basis, if you ping them # in case of Nix-specific regressions or for reviewing Nix-specific PRs. diff --git a/.devops/tools.sh b/.devops/tools.sh index 97424c3aa..335382f69 100755 --- a/.devops/tools.sh +++ b/.devops/tools.sh @@ -10,11 +10,11 @@ shift if [[ "$arg1" == '--convert' || "$arg1" == '-c' ]]; then python3 ./convert-hf-to-gguf.py "$@" elif [[ "$arg1" == '--quantize' || "$arg1" == '-q' ]]; then - ./quantize "$@" + ./llama-quantize "$@" elif [[ "$arg1" == '--run' || "$arg1" == '-r' ]]; then - ./main "$@" + ./llama-cli "$@" elif [[ "$arg1" == '--finetune' || "$arg1" == '-f' ]]; then - ./finetune "$@" + ./llama-finetune "$@" elif [[ "$arg1" == '--all-in-one' || "$arg1" == '-a' ]]; then echo "Converting PTH to GGML..." for i in `ls $1/$2/ggml-model-f16.bin*`; do @@ -22,11 +22,11 @@ elif [[ "$arg1" == '--all-in-one' || "$arg1" == '-a' ]]; then echo "Skip model quantization, it already exists: ${i/f16/q4_0}" else echo "Converting PTH to GGML: $i into ${i/f16/q4_0}..." 
- ./quantize "$i" "${i/f16/q4_0}" q4_0 + ./llama-quantize "$i" "${i/f16/q4_0}" q4_0 fi done elif [[ "$arg1" == '--server' || "$arg1" == '-s' ]]; then - ./server "$@" + ./llama-server "$@" else echo "Unknown command: $arg1" echo "Available commands: " diff --git a/.dockerignore b/.dockerignore index 633bbc3a9..8916e2a66 100644 --- a/.dockerignore +++ b/.dockerignore @@ -12,8 +12,8 @@ build*/ models/* -/main -/quantize +/llama-cli +/llama-quantize arm_neon.h compile_commands.json diff --git a/.github/ISSUE_TEMPLATE/01-bug-low.yml b/.github/ISSUE_TEMPLATE/01-bug-low.yml index bfb9d9a06..54785854f 100644 --- a/.github/ISSUE_TEMPLATE/01-bug-low.yml +++ b/.github/ISSUE_TEMPLATE/01-bug-low.yml @@ -24,7 +24,7 @@ body: label: Name and Version description: Which executable and which version of our software are you running? (use `--version` to get a version string) placeholder: | - $./main --version + $./llama-cli --version version: 2999 (42b4109e) built with cc (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0 for x86_64-linux-gnu validations: diff --git a/.github/ISSUE_TEMPLATE/02-bug-medium.yml b/.github/ISSUE_TEMPLATE/02-bug-medium.yml index e8297eea0..a6285c6f0 100644 --- a/.github/ISSUE_TEMPLATE/02-bug-medium.yml +++ b/.github/ISSUE_TEMPLATE/02-bug-medium.yml @@ -24,7 +24,7 @@ body: label: Name and Version description: Which executable and which version of our software are you running? (use `--version` to get a version string) placeholder: | - $./main --version + $./llama-cli --version version: 2999 (42b4109e) built with cc (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0 for x86_64-linux-gnu validations: diff --git a/.github/ISSUE_TEMPLATE/03-bug-high.yml b/.github/ISSUE_TEMPLATE/03-bug-high.yml index 3c9d50d16..ff816b937 100644 --- a/.github/ISSUE_TEMPLATE/03-bug-high.yml +++ b/.github/ISSUE_TEMPLATE/03-bug-high.yml @@ -24,7 +24,7 @@ body: label: Name and Version description: Which executable and which version of our software are you running? (use `--version` to get a version string) placeholder: | - $./main --version + $./llama-cli --version version: 2999 (42b4109e) built with cc (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0 for x86_64-linux-gnu validations: diff --git a/.github/ISSUE_TEMPLATE/04-bug-critical.yml b/.github/ISSUE_TEMPLATE/04-bug-critical.yml index d089d5fa1..7af42a80b 100644 --- a/.github/ISSUE_TEMPLATE/04-bug-critical.yml +++ b/.github/ISSUE_TEMPLATE/04-bug-critical.yml @@ -24,7 +24,7 @@ body: label: Name and Version description: Which executable and which version of our software are you running? 
(use `--version` to get a version string) placeholder: | - $./main --version + $./llama-cli --version version: 2999 (42b4109e) built with cc (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0 for x86_64-linux-gnu validations: diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index de0d994c8..88ab4844e 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -119,7 +119,7 @@ jobs: -DLLAMA_FATAL_WARNINGS=OFF \ -DLLAMA_ALL_WARNINGS=OFF \ -DCMAKE_BUILD_TYPE=Release; - cmake --build build --config Release -j $(nproc) --target server + cmake --build build --config Release -j $(nproc) --target llama-server - name: Download the dataset id: download_dataset diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3c04cfc29..81ce770cc 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -103,12 +103,10 @@ jobs: id: cmake_build run: | sysctl -a - mkdir build - cd build # Metal is disabled due to intermittent failures with Github runners not having a GPU: # https://github.com/ggerganov/llama.cpp/actions/runs/8635935781/job/23674807267#step:5:2313 - cmake -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_METAL=OFF -DLLAMA_CURL=ON .. - cmake --build . --config Release -j $(sysctl -n hw.logicalcpu) + cmake -B build -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_METAL=OFF -DLLAMA_CURL=ON + cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) - name: Test id: cmake_test @@ -241,8 +239,8 @@ jobs: wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories260K/tok512.bin echo "Fetch llama2c model" wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories260K/stories260K.bin - ./bin/convert-llama2c-to-ggml --copy-vocab-from-model ./tok512.bin --llama2c-model stories260K.bin --llama2c-output-model stories260K.gguf - ./bin/main -m stories260K.gguf -p "One day, Lily met a Shoggoth" -n 500 -c 256 + ./bin/llama-convert-llama2c-to-ggml --copy-vocab-from-model ./tok512.bin --llama2c-model stories260K.bin --llama2c-output-model stories260K.gguf + ./bin/llama-cli -m stories260K.gguf -p "One day, Lily met a Shoggoth" -n 500 -c 256 - name: Determine tag name id: tag diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 9b03d19bc..6244b4812 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,20 +30,20 @@ jobs: strategy: matrix: config: - - { tag: "light", dockerfile: ".devops/main.Dockerfile", platforms: "linux/amd64,linux/arm64" } + - { tag: "light", dockerfile: ".devops/llama-cli.Dockerfile", platforms: "linux/amd64,linux/arm64" } + - { tag: "server", dockerfile: ".devops/llama-server.Dockerfile", platforms: "linux/amd64,linux/arm64" } - { tag: "full", dockerfile: ".devops/full.Dockerfile", platforms: "linux/amd64,linux/arm64" } - - { tag: "server", dockerfile: ".devops/server.Dockerfile", platforms: "linux/amd64,linux/arm64" } # NOTE(canardletter): The CUDA builds on arm64 are very slow, so I # have disabled them for now until the reason why # is understood. 
- - { tag: "light-cuda", dockerfile: ".devops/main-cuda.Dockerfile", platforms: "linux/amd64" } + - { tag: "light-cuda", dockerfile: ".devops/llama-cli-cuda.Dockerfile", platforms: "linux/amd64" } + - { tag: "server-cuda", dockerfile: ".devops/llama-server-cuda.Dockerfile", platforms: "linux/amd64" } - { tag: "full-cuda", dockerfile: ".devops/full-cuda.Dockerfile", platforms: "linux/amd64" } - - { tag: "server-cuda", dockerfile: ".devops/server-cuda.Dockerfile", platforms: "linux/amd64" } - - { tag: "light-rocm", dockerfile: ".devops/main-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" } + - { tag: "light-rocm", dockerfile: ".devops/llama-cli-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" } + - { tag: "server-rocm", dockerfile: ".devops/llama-server-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" } - { tag: "full-rocm", dockerfile: ".devops/full-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" } - - { tag: "server-rocm", dockerfile: ".devops/server-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" } - - { tag: "light-intel", dockerfile: ".devops/main-intel.Dockerfile", platforms: "linux/amd64" } - - { tag: "server-intel", dockerfile: ".devops/server-intel.Dockerfile", platforms: "linux/amd64" } + - { tag: "light-intel", dockerfile: ".devops/llama-cli-intel.Dockerfile", platforms: "linux/amd64" } + - { tag: "server-intel", dockerfile: ".devops/llama-server-intel.Dockerfile", platforms: "linux/amd64" } steps: - name: Check out the repo uses: actions/checkout@v4 diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 0d16ef5f1..1fee9ac28 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -96,7 +96,7 @@ jobs: -DLLAMA_CURL=ON \ -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \ -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; - cmake --build build --config ${{ matrix.build_type }} -j $(nproc) --target server + cmake --build build --config ${{ matrix.build_type }} -j $(nproc) --target llama-server - name: Tests id: server_integration_tests @@ -136,7 +136,7 @@ jobs: id: cmake_build run: | cmake -B build -DLLAMA_CURL=ON -DCURL_LIBRARY="$env:RUNNER_TEMP/libcurl/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:RUNNER_TEMP/libcurl/include" - cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} --target server + cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} --target llama-server - name: Python setup id: setup_python diff --git a/.gitignore b/.gitignore index 5223c6963..529659495 100644 --- a/.gitignore +++ b/.gitignore @@ -46,48 +46,9 @@ models/* models-mnt /Pipfile -/baby-llama -/beam-search -/benchmark-matmult -/convert-llama2c-to-ggml -/embd-input-test -/embedding -/eval-callback -/gguf -/gguf-llama-simple -/gguf-split -/gritlm -/imatrix -/infill /libllama.so -/llama-bench -/llava-cli -/lookahead -/lookup -/lookup-create -/lookup-merge -/lookup-stats -/main -/metal -/passkey -/perplexity -/q8dot -/quantize -/quantize-stats -/result -/save-load-state -/server -/simple -/batched -/batched-bench -/export-lora -/finetune -/retrieval -/speculative -/parallel -/train-text-from-scratch -/tokenize -/vdot +/llama-* +llama-batched-swift /common/build-info.cpp arm_neon.h compile_commands.json diff --git a/Makefile b/Makefile index 895c62f84..a4cab1bb2 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,44 @@ # Define the default target now so that it is always the first target BUILD_TARGETS = \ - main quantize quantize-stats perplexity imatrix embedding vdot q8dot train-text-from-scratch 
convert-llama2c-to-ggml \ - simple batched batched-bench save-load-state server gguf gguf-split eval-callback llama-bench libllava.a llava-cli baby-llama \ - retrieval speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup passkey gritlm tests/test-c.o + libllava.a \ + llama-baby-llama \ + llama-batched \ + llama-batched-bench \ + llama-bench \ + llama-benchmark-matmult \ + llama-cli \ + llama-convert-llama2c-to-ggml \ + llama-embedding \ + llama-eval-callback \ + llama-export-lora \ + llama-finetune \ + llama-gbnf-validator \ + llama-gguf \ + llama-gguf-split \ + llama-gritlm \ + llama-imatrix \ + llama-infill \ + llama-llava-cli \ + llama-lookahead \ + llama-lookup \ + llama-lookup-create \ + llama-lookup-merge \ + llama-lookup-stats \ + llama-parallel \ + llama-passkey \ + llama-perplexity \ + llama-q8dot \ + llama-quantize \ + llama-quantize-stats \ + llama-retrieval \ + llama-save-load-state \ + llama-server \ + llama-simple \ + llama-speculative \ + llama-tokenize \ + llama-train-text-from-scratch \ + llama-vdot \ + tests/test-c.o # Binaries only useful for tests TEST_TARGETS = \ @@ -777,7 +813,7 @@ libllama.a: llama.o ggml.o $(OBJS) $(COMMON_DEPS) ar rcs libllama.a llama.o ggml.o $(OBJS) $(COMMON_DEPS) clean: - rm -vrf *.o tests/*.o *.so *.a *.dll benchmark-matmult lookup-create lookup-merge lookup-stats common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) + rm -vrf *.o tests/*.o *.so *.a *.dll common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) rm -vrf ggml-cuda/*.o rm -vrf ggml-cuda/template-instances/*.o find examples pocs -type f -name "*.o" -delete @@ -793,62 +829,62 @@ clean: # Helper function that replaces .c, .cpp, and .cu file endings with .o: GET_OBJ_FILE = $(patsubst %.c,%.o,$(patsubst %.cpp,%.o,$(patsubst %.cu,%.o,$(1)))) -main: examples/main/main.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) +llama-cli: examples/main/main.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) @echo - @echo '==== Run ./main -h for help. ====' + @echo '==== Run ./llama-cli -h for help. 
====' @echo -infill: examples/infill/infill.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) +llama-infill: examples/infill/infill.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -simple: examples/simple/simple.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-simple: examples/simple/simple.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -tokenize: examples/tokenize/tokenize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-tokenize: examples/tokenize/tokenize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -batched: examples/batched/batched.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-batched: examples/batched/batched.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -batched-bench: examples/batched-bench/batched-bench.cpp build-info.o ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-batched-bench: examples/batched-bench/batched-bench.cpp build-info.o ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -quantize: examples/quantize/quantize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-quantize: examples/quantize/quantize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml.o llama.o $(OBJS) +llama-quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -imatrix: examples/imatrix/imatrix.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-imatrix: examples/imatrix/imatrix.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -gritlm: examples/gritlm/gritlm.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-gritlm: examples/gritlm/gritlm.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o 
$(COMMON_DEPS) $(OBJS) +llama-save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/utils.hpp examples/server/httplib.h common/json.hpp examples/server/colorthemes.css.hpp examples/server/style.css.hpp examples/server/theme-beeninorder.css.hpp examples/server/theme-ketivah.css.hpp examples/server/theme-mangotango.css.hpp examples/server/theme-playground.css.hpp examples/server/theme-polarnight.css.hpp examples/server/theme-snowstorm.css.hpp examples/server/index.html.hpp examples/server/index-new.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/server/system-prompts.js.hpp examples/server/prompt-formats.js.hpp examples/server/json-schema-to-grammar.mjs.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +llama-server: examples/server/server.cpp examples/server/utils.hpp examples/server/httplib.h common/json.hpp examples/server/colorthemes.css.hpp examples/server/style.css.hpp examples/server/theme-beeninorder.css.hpp examples/server/theme-ketivah.css.hpp examples/server/theme-mangotango.css.hpp examples/server/theme-playground.css.hpp examples/server/theme-polarnight.css.hpp examples/server/theme-snowstorm.css.hpp examples/server/index.html.hpp examples/server/index-new.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/server/system-prompts.js.hpp examples/server/prompt-formats.js.hpp examples/server/json-schema-to-grammar.mjs.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2) @@ -861,23 +897,23 @@ examples/server/%.hpp: examples/server/public/% Makefile echo "unsigned int $${NAME}_len = $(shell cat $< | wc -c );" \ ) > $@ -gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) +llama-gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -gguf-split: examples/gguf-split/gguf-split.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-gguf-split: examples/gguf-split/gguf-split.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -eval-callback: examples/eval-callback/eval-callback.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-eval-callback: examples/eval-callback/eval-callback.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) +llama-train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp ggml.o llama.o $(OBJS) +llama-convert-llama2c-to-ggml: 
examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) @@ -888,55 +924,61 @@ llama-bench: examples/llama-bench/llama-bench.cpp ggml.o llama.o $(COMMON_DEPS) libllava.a: examples/llava/llava.cpp examples/llava/llava.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h common/base64.hpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -static -fPIC -c $< -o $@ -Wno-cast-qual -llava-cli: examples/llava/llava-cli.cpp examples/llava/clip.h examples/llava/clip.cpp examples/llava/llava.h examples/llava/llava.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-llava-cli: examples/llava/llava-cli.cpp examples/llava/clip.h examples/llava/clip.cpp examples/llava/llava.h examples/llava/llava.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual $(CXX) $(CXXFLAGS) -c examples/llava/llava.cpp -o $(call GET_OBJ_FILE, examples/llava/llava.cpp) $(CXX) $(CXXFLAGS) $(filter-out %.h $< examples/llava/clip.cpp examples/llava/llava.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) $(call GET_OBJ_FILE, examples/llava/llava.cpp) -o $@ $(LDFLAGS) -baby-llama: examples/baby-llama/baby-llama.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) +llama-baby-llama: examples/baby-llama/baby-llama.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -finetune: examples/finetune/finetune.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) +llama-finetune: examples/finetune/finetune.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -export-lora: examples/export-lora/export-lora.cpp ggml.o common/common.h $(OBJS) +llama-export-lora: examples/export-lora/export-lora.cpp ggml.o common/common.h $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -retrieval: examples/retrieval/retrieval.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-retrieval: examples/retrieval/retrieval.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +llama-speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, 
$<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -lookup: examples/lookup/lookup.cpp ggml.o llama.o ngram-cache.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) - $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) - $(CXX) $(CXXFLAGS) -c examples/lookup/lookup-create.cpp -o $(call GET_OBJ_FILE, examples/lookup/lookup-create.cpp) - $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, examples/lookup/lookup-create.cpp) -o lookup-create $(LDFLAGS) - $(CXX) $(CXXFLAGS) -c examples/lookup/lookup-merge.cpp -o $(call GET_OBJ_FILE, examples/lookup/lookup-merge.cpp) - $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, examples/lookup/lookup-merge.cpp) -o lookup-merge $(LDFLAGS) - $(CXX) $(CXXFLAGS) -c examples/lookup/lookup-stats.cpp -o $(call GET_OBJ_FILE, examples/lookup/lookup-stats.cpp) - $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, examples/lookup/lookup-stats.cpp) -o lookup-stats $(LDFLAGS) - -passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-lookup: examples/lookup/lookup.cpp ggml.o llama.o ngram-cache.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -gbnf-validator: examples/gbnf-validator/gbnf-validator.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +llama-lookup-create: examples/lookup/lookup-create.cpp ggml.o llama.o ngram-cache.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) + +llama-lookup-merge: examples/lookup/lookup-merge.cpp ggml.o llama.o ngram-cache.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) + +llama-lookup-stats: examples/lookup/lookup-stats.cpp ggml.o llama.o ngram-cache.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) + +llama-passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) + +llama-gbnf-validator: examples/gbnf-validator/gbnf-validator.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) @@ -962,20 +1004,20 @@ build-info.o: common/build-info.cpp tests: $(TEST_TARGETS) -benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.o ggml.o $(OBJS) +llama-benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -run-benchmark-matmult: benchmark-matmult +run-benchmark-matmult: llama-benchmark-matmult ./$@ .PHONY: run-benchmark-matmult swift -vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) +llama-vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -q8dot: pocs/vdot/q8dot.cpp ggml.o $(OBJS) +llama-q8dot: pocs/vdot/q8dot.cpp ggml.o 
$(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) diff --git a/README-sycl.md b/README-sycl.md index 62b38135c..93b623daf 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -77,7 +77,7 @@ It has the similar design of other llama.cpp BLAS-based paths such as *OpenBLAS, *Notes:* - **Memory** - - The device memory is a limitation when running a large model. The loaded model size, *`llm_load_tensors: buffer_size`*, is displayed in the log when running `./bin/main`. + - The device memory is a limitation when running a large model. The loaded model size, *`llm_load_tensors: buffer_size`*, is displayed in the log when running `./bin/llama-cli`. - Please make sure the GPU shared memory from the host is large enough to account for the model's size. For e.g. the *llama-2-7b.Q4_0* requires at least 8.0GB for integrated GPU and 4.0GB for discrete GPU. @@ -99,14 +99,14 @@ The docker build option is currently limited to *intel GPU* targets. ### Build image ```sh # Using FP16 -docker build -t llama-cpp-sycl --build-arg="LLAMA_SYCL_F16=ON" -f .devops/main-intel.Dockerfile . +docker build -t llama-cpp-sycl --build-arg="LLAMA_SYCL_F16=ON" -f .devops/llama-cli-intel.Dockerfile . ``` *Notes*: To build in default FP32 *(Slower than FP16 alternative)*, you can remove the `--build-arg="LLAMA_SYCL_F16=ON"` argument from the previous command. -You can also use the `.devops/server-intel.Dockerfile`, which builds the *"server"* alternative. +You can also use the `.devops/llama-server-intel.Dockerfile`, which builds the *"server"* alternative. ### Run container @@ -275,7 +275,7 @@ source /opt/intel/oneapi/setvars.sh Similar to the native `sycl-ls`, available SYCL devices can be queried as follow: ```sh -./build/bin/ls-sycl-device +./build/bin/llama-ls-sycl-device ``` A example of such log in a system with 1 *intel CPU* and 1 *intel GPU* can look like the following: ``` @@ -313,7 +313,7 @@ Examples: - Use device 0: ```sh -ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 -sm none -mg 0 +ZES_ENABLE_SYSMAN=1 ./build/bin/llama-cli -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 -sm none -mg 0 ``` or run by script: @@ -324,7 +324,7 @@ or run by script: - Use multiple devices: ```sh -ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 -sm layer +ZES_ENABLE_SYSMAN=1 ./build/bin/llama-cli -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 -sm layer ``` Otherwise, you can run the script: @@ -427,7 +427,7 @@ Otherwise, run the `win-build-sycl.bat` wrapper which encapsulates the former in *Notes:* -- By default, calling `make` will build all target binary files. In case of a minimal experimental setup, the user can build the inference executable only through `make main`. +- By default, calling `make` will build all target binary files. In case of a minimal experimental setup, the user can build the inference executable only through `make llama-cli`. ### III. 
Run the inference @@ -488,13 +488,13 @@ Examples: - Use device 0: ``` -build\bin\main.exe -m models\llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -ngl 33 -s 0 -sm none -mg 0 +build\bin\llama-cli.exe -m models\llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -ngl 33 -s 0 -sm none -mg 0 ``` - Use multiple devices: ``` -build\bin\main.exe -m models\llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -ngl 33 -s 0 -sm layer +build\bin\llama-cli.exe -m models\llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -ngl 33 -s 0 -sm layer ``` Otherwise, run the following wrapper script: diff --git a/README.md b/README.md index 8c065aace..d1c6190dd 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,9 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) in pure C/C++ +> [!IMPORTANT] +[2024 Jun 12] Binaries have been renamed w/ a `llama-` prefix. `main` is now `llama-cli`, `server` is `llama-server`, etc (https://github.com/ggerganov/llama.cpp/pull/7809) + ### Recent API changes - [2024 Apr 21] `llama_token_to_piece` can now optionally render special tokens https://github.com/ggerganov/llama.cpp/pull/6807 @@ -217,7 +220,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: Here is a typical run using LLaMA v2 13B on M2 Ultra: ``` -$ make -j && ./main -m models/llama-13b-v2/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e +$ make -j && ./llama-cli -m models/llama-13b-v2/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e I llama.cpp build info: I UNAME_S: Darwin I UNAME_P: arm @@ -555,7 +558,7 @@ Building the program with BLAS support may lead to some performance improvements ```sh # Build the image - docker build -t llama-cpp-vulkan -f .devops/main-vulkan.Dockerfile . + docker build -t llama-cpp-vulkan -f .devops/llama-cli-vulkan.Dockerfile . # Then, use it: docker run -it --rm -v "$(pwd):/app:Z" --device /dev/dri/renderD128:/dev/dri/renderD128 --device /dev/dri/card1:/dev/dri/card1 llama-cpp-vulkan -m "/app/models/YOUR_MODEL_FILE" -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 @@ -586,7 +589,7 @@ Building the program with BLAS support may lead to some performance improvements cmake -B build -DLLAMA_VULKAN=1 cmake --build build --config Release # Test the output binary (with "-ngl 33" to offload all layers to GPU) - ./bin/main -m "PATH_TO_MODEL" -p "Hi you how are you" -n 50 -e -ngl 33 -t 4 + ./bin/llama-cli -m "PATH_TO_MODEL" -p "Hi you how are you" -n 50 -e -ngl 33 -t 4 # You should see in the output, ggml_vulkan detected your GPU. 
For example: # ggml_vulkan: Using Intel(R) Graphics (ADL GT2) | uma: 1 | fp16: 1 | warp size: 32 @@ -623,17 +626,17 @@ python3 convert-hf-to-gguf.py models/mymodel/ python convert-hf-to-gguf.py models/mymodel/ --vocab-type bpe # quantize the model to 4-bits (using Q4_K_M method) -./quantize ./models/mymodel/ggml-model-f16.gguf ./models/mymodel/ggml-model-Q4_K_M.gguf Q4_K_M +./llama-quantize ./models/mymodel/ggml-model-f16.gguf ./models/mymodel/ggml-model-Q4_K_M.gguf Q4_K_M # update the gguf filetype to current version if older version is now unsupported -./quantize ./models/mymodel/ggml-model-Q4_K_M.gguf ./models/mymodel/ggml-model-Q4_K_M-v2.gguf COPY +./llama-quantize ./models/mymodel/ggml-model-Q4_K_M.gguf ./models/mymodel/ggml-model-Q4_K_M-v2.gguf COPY ``` ### Run the quantized model ```bash # start inference on a gguf model -./main -m ./models/mymodel/ggml-model-Q4_K_M.gguf -n 128 +./llama-cli -m ./models/mymodel/ggml-model-Q4_K_M.gguf -n 128 ``` When running the larger models, make sure you have enough disk space to store all the intermediate files. @@ -708,7 +711,7 @@ The time per token is measured on a MacBook M1 Pro 32GB RAM using 4 and 8 thread #### How to run 1. Download/extract: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip -2. Run `./perplexity -m models/7B/ggml-model-q4_0.gguf -f wiki.test.raw` +2. Run `./llama-perplexity -m models/7B/ggml-model-q4_0.gguf -f wiki.test.raw` 3. Output: ``` perplexity : calculating perplexity over 655 chunks @@ -732,16 +735,16 @@ Here is an example of a few-shot interaction, invoked with the command ./examples/chat-13B.sh # custom arguments using a 13B model -./main -m ./models/13B/ggml-model-q4_0.gguf -n 256 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat-with-bob.txt +./llama-cli -m ./models/13B/ggml-model-q4_0.gguf -n 256 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat-with-bob.txt ``` -Note the use of `--color` to distinguish between user input and generated text. Other parameters are explained in more detail in the [README](examples/main/README.md) for the `main` example program. +Note the use of `--color` to distinguish between user input and generated text. Other parameters are explained in more detail in the [README](examples/main/README.md) for the `llama-cli` example program. ![image](https://user-images.githubusercontent.com/1991296/224575029-2af3c7dc-5a65-4f64-a6bb-517a532aea38.png) ### Persistent Interaction -The prompt, user inputs, and model generations can be saved and resumed across calls to `./main` by leveraging `--prompt-cache` and `--prompt-cache-all`. The `./examples/chat-persistent.sh` script demonstrates this with support for long-running, resumable chat sessions. To use this example, you must provide a file to cache the initial chat prompt and a directory to save the chat session, and may optionally provide the same variables as `chat-13B.sh`. The same prompt cache can be reused for new chat sessions. Note that both prompt cache and chat directory are tied to the initial prompt (`PROMPT_TEMPLATE`) and the model file. +The prompt, user inputs, and model generations can be saved and resumed across calls to `./llama-cli` by leveraging `--prompt-cache` and `--prompt-cache-all`. The `./examples/chat-persistent.sh` script demonstrates this with support for long-running, resumable chat sessions. 
To use this example, you must provide a file to cache the initial chat prompt and a directory to save the chat session, and may optionally provide the same variables as `chat-13B.sh`. The same prompt cache can be reused for new chat sessions. Note that both prompt cache and chat directory are tied to the initial prompt (`PROMPT_TEMPLATE`) and the model file. ```bash # Start a new chat @@ -763,7 +766,7 @@ PROMPT_TEMPLATE=./prompts/chat-with-bob.txt PROMPT_CACHE_FILE=bob.prompt.bin \ `llama.cpp` supports grammars to constrain model output. For example, you can force the model to output JSON only: ```bash -./main -m ./models/13B/ggml-model-q4_0.gguf -n 256 --grammar-file grammars/json.gbnf -p 'Request: schedule a call at 8pm; Command:' +./llama-cli -m ./models/13B/ggml-model-q4_0.gguf -n 256 --grammar-file grammars/json.gbnf -p 'Request: schedule a call at 8pm; Command:' ``` The `grammars/` folder contains a handful of sample grammars. To write your own, check out the [GBNF Guide](./grammars/README.md). @@ -842,7 +845,7 @@ $mv /sdcard/llama.cpp/llama-2-7b-chat.Q4_K_M.gguf /data/data/com.termux/files/ho Now, you can start chatting: ``` $cd /data/data/com.termux/files/home/bin -$./main -m ../model/llama-2-7b-chat.Q4_K_M.gguf -n 128 -cml +$./llama-cli -m ../model/llama-2-7b-chat.Q4_K_M.gguf -n 128 -cml ``` Here's a demo of an interactive session running on Pixel 5 phone: @@ -909,8 +912,8 @@ Assuming one has the [nvidia-container-toolkit](https://github.com/NVIDIA/nvidia ```bash docker build -t local/llama.cpp:full-cuda -f .devops/full-cuda.Dockerfile . -docker build -t local/llama.cpp:light-cuda -f .devops/main-cuda.Dockerfile . -docker build -t local/llama.cpp:server-cuda -f .devops/server-cuda.Dockerfile . +docker build -t local/llama.cpp:light-cuda -f .devops/llama-cli-cuda.Dockerfile . +docker build -t local/llama.cpp:server-cuda -f .devops/llama-server-cuda.Dockerfile . ``` You may want to pass in some different `ARGS`, depending on the CUDA environment supported by your container host, as well as the GPU architecture. 
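For example, assuming the CUDA Dockerfiles keep their usual `CUDA_VERSION` and `CUDA_DOCKER_ARCH` build arguments, a build pinned to a specific toolkit and a single compute capability might look like this sketch:

```bash
# hypothetical values -- adjust to the toolkit and GPU actually present on the host
docker build -t local/llama.cpp:light-cuda \
    --build-arg CUDA_VERSION=12.2.0 \
    --build-arg CUDA_DOCKER_ARCH=sm_86 \
    -f .devops/llama-cli-cuda.Dockerfile .
```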
@@ -960,7 +963,7 @@ docker run --gpus all -v /path/to/models:/models local/llama.cpp:server-cuda -m ### Docs -- [main](./examples/main/README.md) +- [main (cli)](./examples/main/README.md) - [server](./examples/server/README.md) - [jeopardy](./examples/jeopardy/README.md) - [BLIS](./docs/BLIS.md) diff --git a/ci/run.sh b/ci/run.sh index 3fc5f48b2..291c44f47 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -303,47 +303,47 @@ function gg_run_open_llama_7b_v2 { wiki_test="${path_wiki}/wiki.test.raw" - ./bin/quantize ${model_f16} ${model_q8_0} q8_0 - ./bin/quantize ${model_f16} ${model_q4_0} q4_0 - ./bin/quantize ${model_f16} ${model_q4_1} q4_1 - ./bin/quantize ${model_f16} ${model_q5_0} q5_0 - ./bin/quantize ${model_f16} ${model_q5_1} q5_1 - ./bin/quantize ${model_f16} ${model_q2_k} q2_k - ./bin/quantize ${model_f16} ${model_q3_k} q3_k - ./bin/quantize ${model_f16} ${model_q4_k} q4_k - ./bin/quantize ${model_f16} ${model_q5_k} q5_k - ./bin/quantize ${model_f16} ${model_q6_k} q6_k + ./bin/llama-quantize ${model_f16} ${model_q8_0} q8_0 + ./bin/llama-quantize ${model_f16} ${model_q4_0} q4_0 + ./bin/llama-quantize ${model_f16} ${model_q4_1} q4_1 + ./bin/llama-quantize ${model_f16} ${model_q5_0} q5_0 + ./bin/llama-quantize ${model_f16} ${model_q5_1} q5_1 + ./bin/llama-quantize ${model_f16} ${model_q2_k} q2_k + ./bin/llama-quantize ${model_f16} ${model_q3_k} q3_k + ./bin/llama-quantize ${model_f16} ${model_q4_k} q4_k + ./bin/llama-quantize ${model_f16} ${model_q5_k} q5_k + ./bin/llama-quantize ${model_f16} ${model_q6_k} q6_k - (time ./bin/main --model ${model_f16} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/main --model ${model_q8_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/main --model ${model_q4_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/main --model ${model_q4_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/main --model ${model_q5_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/main --model ${model_q5_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/main --model ${model_q2_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/main --model ${model_q3_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/main --model ${model_q4_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/main --model ${model_q5_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/main --model ${model_q6_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/llama-cli --model ${model_f16} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-cli --model ${model_q8_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I 
believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-cli --model ${model_q4_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log + (time ./bin/llama-cli --model ${model_q4_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/llama-cli --model ${model_q5_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/llama-cli --model ${model_q5_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/llama-cli --model ${model_q2_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/llama-cli --model ${model_q3_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/llama-cli --model ${model_q4_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/llama-cli --model ${model_q5_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/llama-cli --model ${model_q6_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/perplexity --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/perplexity --model ${model_q8_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/perplexity --model ${model_q4_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/perplexity --model ${model_q4_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/perplexity --model ${model_q5_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/perplexity --model ${model_q5_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/perplexity --model ${model_q2_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/perplexity --model ${model_q3_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/perplexity --model ${model_q4_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/perplexity --model ${model_q5_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/perplexity --model ${model_q6_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/llama-perplexity --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-perplexity --model ${model_q8_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-perplexity --model ${model_q4_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a 
$OUT/${ci}-tg-q4_0.log + (time ./bin/llama-perplexity --model ${model_q4_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/llama-perplexity --model ${model_q5_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/llama-perplexity --model ${model_q5_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/llama-perplexity --model ${model_q2_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/llama-perplexity --model ${model_q3_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/llama-perplexity --model ${model_q4_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/llama-perplexity --model ${model_q5_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/llama-perplexity --model ${model_q6_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/imatrix --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log + (time ./bin/llama-imatrix --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log - (time ./bin/save-load-state -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -fa -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -fa -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -fa -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -fa -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log function check_ppl { qnt="$1" @@ -437,45 +437,45 @@ function gg_run_pythia_1_4b { wiki_test_60="${path_wiki}/wiki.test-60.raw" - ./bin/quantize ${model_f16} ${model_q8_0} q8_0 - ./bin/quantize ${model_f16} ${model_q4_0} q4_0 - ./bin/quantize ${model_f16} ${model_q4_1} q4_1 - ./bin/quantize ${model_f16} ${model_q5_0} q5_0 - ./bin/quantize ${model_f16} ${model_q5_1} q5_1 - ./bin/quantize ${model_f16} ${model_q2_k} q2_k - ./bin/quantize ${model_f16} ${model_q3_k} q3_k - ./bin/quantize ${model_f16} ${model_q4_k} q4_k - ./bin/quantize ${model_f16} ${model_q5_k} q5_k - ./bin/quantize ${model_f16} ${model_q6_k} q6_k + ./bin/llama-quantize ${model_f16} ${model_q8_0} q8_0 + ./bin/llama-quantize ${model_f16} ${model_q4_0} q4_0 + ./bin/llama-quantize ${model_f16} ${model_q4_1} q4_1 + ./bin/llama-quantize ${model_f16} ${model_q5_0} q5_0 + ./bin/llama-quantize ${model_f16} ${model_q5_1} q5_1 + ./bin/llama-quantize ${model_f16} ${model_q2_k} q2_k + ./bin/llama-quantize ${model_f16} ${model_q3_k} q3_k + ./bin/llama-quantize ${model_f16} ${model_q4_k} q4_k + ./bin/llama-quantize ${model_f16} ${model_q5_k} q5_k + ./bin/llama-quantize ${model_f16} ${model_q6_k} q6_k - 
(time ./bin/main --model ${model_f16} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/main --model ${model_q8_0} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/main --model ${model_q4_0} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/main --model ${model_q4_1} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/main --model ${model_q5_0} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/main --model ${model_q5_1} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/main --model ${model_q2_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/main --model ${model_q3_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/main --model ${model_q4_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/main --model ${model_q5_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/main --model ${model_q6_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/llama-cli --model ${model_f16} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-cli --model ${model_q8_0} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-cli --model ${model_q4_0} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log + (time ./bin/llama-cli --model ${model_q4_1} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/llama-cli --model ${model_q5_0} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/llama-cli --model ${model_q5_1} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/llama-cli --model ${model_q2_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/llama-cli --model ${model_q3_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/llama-cli --model ${model_q4_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/llama-cli --model ${model_q5_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/llama-cli --model ${model_q6_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/perplexity --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/perplexity --model ${model_q8_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/perplexity --model ${model_q4_0} -f 
${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/perplexity --model ${model_q4_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/perplexity --model ${model_q5_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/perplexity --model ${model_q5_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/perplexity --model ${model_q2_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/perplexity --model ${model_q3_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/perplexity --model ${model_q4_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/perplexity --model ${model_q5_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/perplexity --model ${model_q6_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/llama-perplexity --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-perplexity --model ${model_q8_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-perplexity --model ${model_q4_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log + (time ./bin/llama-perplexity --model ${model_q4_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/llama-perplexity --model ${model_q5_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/llama-perplexity --model ${model_q5_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/llama-perplexity --model ${model_q2_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/llama-perplexity --model ${model_q3_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/llama-perplexity --model ${model_q4_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/llama-perplexity --model ${model_q5_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/llama-perplexity --model ${model_q6_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/imatrix --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log + (time ./bin/llama-imatrix --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log - (time ./bin/save-load-state --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -fa --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -fa --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log function check_ppl { qnt="$1" @@ -569,47 +569,47 @@ function gg_run_pythia_2_8b { wiki_test="${path_wiki}/wiki.test.raw" - ./bin/quantize ${model_f16} ${model_q8_0} q8_0 - ./bin/quantize ${model_f16} ${model_q4_0} q4_0 - ./bin/quantize ${model_f16} ${model_q4_1} 
q4_1 - ./bin/quantize ${model_f16} ${model_q5_0} q5_0 - ./bin/quantize ${model_f16} ${model_q5_1} q5_1 - ./bin/quantize ${model_f16} ${model_q2_k} q2_k - ./bin/quantize ${model_f16} ${model_q3_k} q3_k - ./bin/quantize ${model_f16} ${model_q4_k} q4_k - ./bin/quantize ${model_f16} ${model_q5_k} q5_k - ./bin/quantize ${model_f16} ${model_q6_k} q6_k + ./bin/llama-quantize ${model_f16} ${model_q8_0} q8_0 + ./bin/llama-quantize ${model_f16} ${model_q4_0} q4_0 + ./bin/llama-quantize ${model_f16} ${model_q4_1} q4_1 + ./bin/llama-quantize ${model_f16} ${model_q5_0} q5_0 + ./bin/llama-quantize ${model_f16} ${model_q5_1} q5_1 + ./bin/llama-quantize ${model_f16} ${model_q2_k} q2_k + ./bin/llama-quantize ${model_f16} ${model_q3_k} q3_k + ./bin/llama-quantize ${model_f16} ${model_q4_k} q4_k + ./bin/llama-quantize ${model_f16} ${model_q5_k} q5_k + ./bin/llama-quantize ${model_f16} ${model_q6_k} q6_k - (time ./bin/main --model ${model_f16} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/main --model ${model_q8_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/main --model ${model_q4_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/main --model ${model_q4_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/main --model ${model_q5_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/main --model ${model_q5_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/main --model ${model_q2_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/main --model ${model_q3_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/main --model ${model_q4_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/main --model ${model_q5_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/main --model ${model_q6_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/llama-cli --model ${model_f16} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-cli --model ${model_q8_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-cli --model ${model_q4_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log + (time ./bin/llama-cli --model ${model_q4_1} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/llama-cli --model ${model_q5_0} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/llama-cli --model ${model_q5_1} -t 1 -ngl 999 -s 1234 -n 256 
--ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/llama-cli --model ${model_q2_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/llama-cli --model ${model_q3_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/llama-cli --model ${model_q4_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/llama-cli --model ${model_q5_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/llama-cli --model ${model_q6_k} -t 1 -ngl 999 -s 1234 -n 256 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/perplexity --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/perplexity --model ${model_q8_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/perplexity --model ${model_q4_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/perplexity --model ${model_q4_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/perplexity --model ${model_q5_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/perplexity --model ${model_q5_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/perplexity --model ${model_q2_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/perplexity --model ${model_q3_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/perplexity --model ${model_q4_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/perplexity --model ${model_q5_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/perplexity --model ${model_q6_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/llama-perplexity --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-perplexity --model ${model_q8_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-perplexity --model ${model_q4_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log + (time ./bin/llama-perplexity --model ${model_q4_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/llama-perplexity --model ${model_q5_0} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/llama-perplexity --model ${model_q5_1} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/llama-perplexity --model ${model_q2_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/llama-perplexity --model 
${model_q3_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/llama-perplexity --model ${model_q4_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/llama-perplexity --model ${model_q5_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/llama-perplexity --model ${model_q6_k} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/imatrix --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log + (time ./bin/llama-imatrix --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log - (time ./bin/save-load-state -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -fa -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log - (time ./bin/save-load-state -fa -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -fa -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/llama-save-load-state -fa -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log function check_ppl { qnt="$1" @@ -693,10 +693,10 @@ function gg_run_embd_bge_small { model_f16="${path_models}/ggml-model-f16.gguf" model_q8_0="${path_models}/ggml-model-q8_0.gguf" - ./bin/quantize ${model_f16} ${model_q8_0} q8_0 + ./bin/llama-quantize ${model_f16} ${model_q8_0} q8_0 - (time ./bin/embedding --model ${model_f16} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time ./bin/embedding --model ${model_q8_0} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/llama-embedding --model ${model_f16} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/llama-embedding --model ${model_q8_0} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log set +e } diff --git a/docs/HOWTO-add-model.md b/docs/HOWTO-add-model.md index 138124248..3eec077ea 100644 --- a/docs/HOWTO-add-model.md +++ b/docs/HOWTO-add-model.md @@ -100,7 +100,7 @@ Have a look at existing implementation like `build_llama`, `build_dbrx` or `buil When implementing a new graph, please note that the underlying `ggml` backends might not support them all, support for missing backend operations can be added in another PR. -Note: to debug the inference graph: you can use [eval-callback](../examples/eval-callback). +Note: to debug the inference graph: you can use [llama-eval-callback](../examples/eval-callback). 
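A minimal sketch of such a debugging run (the model path is a placeholder, and the flags follow the common CLI options shared by the example programs):

```bash
# print every graph op with its tensor data while generating a handful of tokens
./llama-eval-callback -m path/to/your-new-model.gguf -p "hello" -n 8 -ngl 0
```

Comparing the dumped tensors layer by layer against a reference implementation is usually the quickest way to localize a faulty graph node.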
## GGUF specification diff --git a/docs/token_generation_performance_tips.md b/docs/token_generation_performance_tips.md index 3c4343147..c0840cad5 100644 --- a/docs/token_generation_performance_tips.md +++ b/docs/token_generation_performance_tips.md @@ -3,7 +3,7 @@ ## Verifying that the model is running on the GPU with CUDA Make sure you compiled llama with the correct env variables according to [this guide](../README.md#CUDA), so that llama accepts the `-ngl N` (or `--n-gpu-layers N`) flag. When running llama, you may configure `N` to be very large, and llama will offload the maximum possible number of layers to the GPU, even if it's less than the number you configured. For example: ```shell -./main -m "path/to/model.gguf" -ngl 200000 -p "Please sir, may I have some " +./llama-cli -m "path/to/model.gguf" -ngl 200000 -p "Please sir, may I have some " ``` When running llama, before it starts the inference work, it will output diagnostic information that shows whether cuBLAS is offloading work to the GPU. Look for these lines: @@ -27,7 +27,7 @@ RAM: 32GB Model: `TheBloke_Wizard-Vicuna-30B-Uncensored-GGML/Wizard-Vicuna-30B-Uncensored.q4_0.gguf` (30B parameters, 4bit quantization, GGML) -Run command: `./main -m "path/to/model.gguf" -p "An extremely detailed description of the 10 best ethnic dishes will follow, with recipes: " -n 1000 [additional benchmark flags]` +Run command: `./llama-cli -m "path/to/model.gguf" -p "An extremely detailed description of the 10 best ethnic dishes will follow, with recipes: " -n 1000 [additional benchmark flags]` Result: diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 53002f8e1..d6ce35f4c 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -13,42 +13,43 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}) if (EMSCRIPTEN) else() add_subdirectory(baby-llama) - add_subdirectory(batched) add_subdirectory(batched-bench) + add_subdirectory(batched) add_subdirectory(benchmark) add_subdirectory(convert-llama2c-to-ggml) add_subdirectory(embedding) add_subdirectory(eval-callback) + add_subdirectory(export-lora) add_subdirectory(finetune) - add_subdirectory(gritlm) + add_subdirectory(gbnf-validator) add_subdirectory(gguf-split) + add_subdirectory(gguf) + add_subdirectory(gritlm) + add_subdirectory(imatrix) add_subdirectory(infill) add_subdirectory(llama-bench) add_subdirectory(llava) - if (LLAMA_SYCL) - add_subdirectory(sycl) - endif() - add_subdirectory(main) - add_subdirectory(tokenize) - add_subdirectory(parallel) - add_subdirectory(perplexity) - add_subdirectory(quantize) - add_subdirectory(quantize-stats) - add_subdirectory(retrieval) - add_subdirectory(save-load-state) - add_subdirectory(simple) - add_subdirectory(passkey) - add_subdirectory(speculative) add_subdirectory(lookahead) add_subdirectory(lookup) - add_subdirectory(gguf) - add_subdirectory(train-text-from-scratch) - add_subdirectory(imatrix) - if (LLAMA_BUILD_SERVER) - add_subdirectory(server) - endif() - add_subdirectory(export-lora) + add_subdirectory(main) + add_subdirectory(parallel) + add_subdirectory(passkey) + add_subdirectory(perplexity) + add_subdirectory(quantize-stats) + add_subdirectory(quantize) + add_subdirectory(retrieval) if (LLAMA_RPC) add_subdirectory(rpc) endif() + if (LLAMA_BUILD_SERVER) + add_subdirectory(server) + endif() + if (LLAMA_SYCL) + add_subdirectory(sycl) + endif() + add_subdirectory(save-load-state) + add_subdirectory(simple) + add_subdirectory(speculative) + add_subdirectory(tokenize) + add_subdirectory(train-text-from-scratch) endif() 
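With the example directories now registered in sorted order, individual tools can still be built by target name; a sketch, assuming the renamed `llama-*` targets introduced in this series:

```bash
cmake -B build -DLLAMA_BUILD_SERVER=ON
cmake --build build --config Release --target llama-cli llama-server llama-quantize
```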
diff --git a/examples/Miku.sh b/examples/Miku.sh index b9174b4e6..0f6c8c878 100755 --- a/examples/Miku.sh +++ b/examples/Miku.sh @@ -22,7 +22,7 @@ if [ -n "$N_THREAD" ]; then GEN_OPTIONS+=(--threads "$N_THREAD") fi -./main "${GEN_OPTIONS[@]}" \ +./llama-cli "${GEN_OPTIONS[@]}" \ --model "$MODEL" \ --in-prefix " " \ --in-suffix "${AI_NAME}:" \ diff --git a/examples/baby-llama/CMakeLists.txt b/examples/baby-llama/CMakeLists.txt index 7b70227a5..71b82105c 100644 --- a/examples/baby-llama/CMakeLists.txt +++ b/examples/baby-llama/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET baby-llama) +set(TARGET llama-baby-llama) add_executable(${TARGET} baby-llama.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/base-translate.sh b/examples/base-translate.sh index 00dedd0df..103a52f55 100755 --- a/examples/base-translate.sh +++ b/examples/base-translate.sh @@ -58,4 +58,4 @@ echo "$2 model=$1 # generate the most likely continuation until the string "===" is found -./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -r "===" $eargs +./llama-cli -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -r "===" $eargs diff --git a/examples/batched-bench/CMakeLists.txt b/examples/batched-bench/CMakeLists.txt index 40a032c51..959acaeee 100644 --- a/examples/batched-bench/CMakeLists.txt +++ b/examples/batched-bench/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET batched-bench) +set(TARGET llama-batched-bench) add_executable(${TARGET} batched-bench.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/batched-bench/README.md b/examples/batched-bench/README.md index fa4baf640..4a07fe6bb 100644 --- a/examples/batched-bench/README.md +++ b/examples/batched-bench/README.md @@ -10,16 +10,16 @@ There are 2 modes of operation: - `prompt is shared` - there is a common prompt of size `PP` used by all batches (i.e. 
`N_KV = PP + B*TG`) ```bash -./batched-bench -m model.gguf -c 2048 -b 2048 -ub 512 -npp 128,256,512 -ntg 128,256 -npl 1,2,4,8,16,32 [-pps] +./llama-batched-bench -m model.gguf -c 2048 -b 2048 -ub 512 -npp 128,256,512 -ntg 128,256 -npl 1,2,4,8,16,32 [-pps] # LLaMA 7B, F16, N_KV_MAX = 16384 (8GB), prompt not shared -./batched-bench -m ./models/llama-7b/ggml-model-f16.gguf -c 16384 -b 2048 -ub 512 -ngl 99 +./llama-batched-bench -m ./models/llama-7b/ggml-model-f16.gguf -c 16384 -b 2048 -ub 512 -ngl 99 # LLaMA 7B, Q8_0, N_KV_MAX = 16384 (8GB), prompt is shared -./batched-bench -m ./models/llama-7b/ggml-model-q8_0.gguf -c 16384 -b 2048 -ub 512 -ngl 99 -pps +./llama-batched-bench -m ./models/llama-7b/ggml-model-q8_0.gguf -c 16384 -b 2048 -ub 512 -ngl 99 -pps # custom set of batches -./batched-bench -m ./models/llama-7b/ggml-model-q8_0.gguf -c 2048 -b 512 -ub 512 -ngl 999 -npp 128,256,512 -ntg 128,256 -npl 1,2,4,8,16,32 +./llama-batched-bench -m ./models/llama-7b/ggml-model-q8_0.gguf -c 2048 -b 512 -ub 512 -ngl 999 -npp 128,256,512 -ntg 128,256 -npl 1,2,4,8,16,32 ``` ## Sample results diff --git a/examples/batched.swift/Makefile b/examples/batched.swift/Makefile index 2afb24fb8..1f9156e58 100755 --- a/examples/batched.swift/Makefile +++ b/examples/batched.swift/Makefile @@ -1,6 +1,6 @@ .PHONY: build build: - xcodebuild -scheme batched_swift -destination "generic/platform=macOS" -derivedDataPath build - rm -f ./batched_swift - ln -s ./build/Build/Products/Debug/batched_swift ./batched_swift + xcodebuild -scheme llama-batched-swift -destination "generic/platform=macOS" -derivedDataPath build + rm -f ./llama-batched-swift + ln -s ./build/Build/Products/Debug/llama-batched-swift ./llama-batched-swift diff --git a/examples/batched.swift/Package.swift b/examples/batched.swift/Package.swift index 826491def..7e8afd084 100644 --- a/examples/batched.swift/Package.swift +++ b/examples/batched.swift/Package.swift @@ -4,7 +4,7 @@ import PackageDescription let package = Package( - name: "batched_swift", + name: "llama-batched-swift", platforms: [.macOS(.v12)], dependencies: [ .package(name: "llama", path: "../../"), @@ -13,7 +13,7 @@ let package = Package( // Targets are the basic building blocks of a package, defining a module or a test suite. // Targets can depend on other targets in this package and products from dependencies. .executableTarget( - name: "batched_swift", + name: "llama-batched-swift", dependencies: ["llama"], path: "Sources", linkerSettings: [.linkedFramework("Foundation"), .linkedFramework("AppKit")] diff --git a/examples/batched.swift/README.md b/examples/batched.swift/README.md index 4c2721fe8..7f2e2fcdc 100644 --- a/examples/batched.swift/README.md +++ b/examples/batched.swift/README.md @@ -1,4 +1,4 @@ This is a swift clone of `examples/batched`. 
$ `make` -$ `./batched_swift MODEL_PATH [PROMPT] [PARALLEL]` +$ `./llama-batched-swift MODEL_PATH [PROMPT] [PARALLEL]` diff --git a/examples/batched/CMakeLists.txt b/examples/batched/CMakeLists.txt index 6aa178d4d..77e33343b 100644 --- a/examples/batched/CMakeLists.txt +++ b/examples/batched/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET batched) +set(TARGET llama-batched) add_executable(${TARGET} batched.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/batched/README.md b/examples/batched/README.md index ed204c308..6013aab01 100644 --- a/examples/batched/README.md +++ b/examples/batched/README.md @@ -3,7 +3,7 @@ The example demonstrates batched generation from a given prompt ```bash -./batched -m ./models/llama-7b-v2/ggml-model-f16.gguf -p "Hello my name is" -np 4 +./llama-batched -m ./models/llama-7b-v2/ggml-model-f16.gguf -p "Hello my name is" -np 4 ... diff --git a/examples/benchmark/CMakeLists.txt b/examples/benchmark/CMakeLists.txt index 2bb47bab5..34a58cc02 100644 --- a/examples/benchmark/CMakeLists.txt +++ b/examples/benchmark/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET benchmark) +set(TARGET llama-bench-matmult) add_executable(${TARGET} benchmark-matmult.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama build_info ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/chat-13B.sh b/examples/chat-13B.sh index 35c089d57..1828903c3 100755 --- a/examples/chat-13B.sh +++ b/examples/chat-13B.sh @@ -30,7 +30,7 @@ sed -e "s/\[\[USER_NAME\]\]/$USER_NAME/g" \ $PROMPT_TEMPLATE > $PROMPT_FILE # shellcheck disable=SC2086 # Intended splitting of GEN_OPTIONS -./main $GEN_OPTIONS \ +./llama-cli $GEN_OPTIONS \ --model "$MODEL" \ --threads "$N_THREAD" \ --n_predict "$N_PREDICTS" \ diff --git a/examples/chat-persistent.sh b/examples/chat-persistent.sh index 22f5b83d3..d9cab9836 100755 --- a/examples/chat-persistent.sh +++ b/examples/chat-persistent.sh @@ -62,7 +62,7 @@ fi if [[ ! -e "$PROMPT_CACHE_FILE" ]]; then echo 'Prompt cache does not exist, building...' # Default batch_size to 64 here for better user feedback during initial prompt processing - ./main 2>>"$LOG" \ + ./llama-cli 2>>"$LOG" \ --batch_size 64 \ "${OPTS[@]}" \ --prompt-cache "$PROMPT_CACHE_FILE" \ @@ -109,13 +109,13 @@ while read -e line; do printf '%s: ' "$AI_NAME" >>"$CUR_PROMPT_FILE" - ./main 2>>"$LOG" "${OPTS[@]}" \ + ./llama-cli 2>>"$LOG" "${OPTS[@]}" \ --prompt-cache "$CUR_PROMPT_CACHE" \ --prompt-cache-all \ --file "$CUR_PROMPT_FILE" \ --reverse-prompt "${USER_NAME}:" \ --n_predict "$n_predict" | - skip_bytes 1 | # skip BOS token added by ./main + skip_bytes 1 | # skip BOS token added by ./llama-cli tee "$CUR_PROMPT_FILE.tmp" | # save prompt + generation to tmp file skip_bytes "$n_prompt_len_pre" # print generation @@ -133,7 +133,7 @@ while read -e line; do # TODO get both messages in one go if ! session_size_msg="$(tail -n30 "$LOG" | grep -oE "$SESSION_SIZE_MSG_PATTERN")" || ! sample_time_msg="$(tail -n10 "$LOG" | grep -oE "$SAMPLE_TIME_MSG_PATTERN")"; then - echo >&2 "Couldn't get number of tokens from ./main output!" + echo >&2 "Couldn't get number of tokens from ./llama-cli output!" 
exit 1 fi @@ -144,7 +144,7 @@ while read -e line; do fi # Update cache for next prompt in background, ideally during user input - ./main >>"$LOG_BG" 2>&1 "${OPTS[@]}" \ + ./llama-cli >>"$LOG_BG" 2>&1 "${OPTS[@]}" \ --prompt-cache "$NEXT_PROMPT_CACHE" \ --file "$NEXT_PROMPT_FILE" \ --n_predict 1 & diff --git a/examples/chat-vicuna.sh b/examples/chat-vicuna.sh index 8c7b7bef4..ffdd20084 100755 --- a/examples/chat-vicuna.sh +++ b/examples/chat-vicuna.sh @@ -30,7 +30,7 @@ sed -e "s/\[\[USER_NAME\]\]/$USER_NAME/g" \ $PROMPT_TEMPLATE > $PROMPT_FILE # shellcheck disable=SC2086 # Intended splitting of GEN_OPTIONS -./bin/main $GEN_OPTIONS \ +./bin/llama-cli $GEN_OPTIONS \ --model "$MODEL" \ --threads "$N_THREAD" \ --n_predict "$N_PREDICTS" \ diff --git a/examples/chat.sh b/examples/chat.sh index d567acecd..9f85d1e26 100755 --- a/examples/chat.sh +++ b/examples/chat.sh @@ -11,6 +11,6 @@ cd .. # # "--keep 48" is based on the contents of prompts/chat-with-bob.txt # -./main -m ./models/llama-7b/ggml-model-q4_0.gguf -c 512 -b 1024 -n 256 --keep 48 \ +./llama-cli -m ./models/llama-7b/ggml-model-q4_0.gguf -c 512 -b 1024 -n 256 --keep 48 \ --repeat_penalty 1.0 --color -i \ -r "User:" -f prompts/chat-with-bob.txt diff --git a/examples/convert-llama2c-to-ggml/CMakeLists.txt b/examples/convert-llama2c-to-ggml/CMakeLists.txt index e262d44f9..a6790e617 100644 --- a/examples/convert-llama2c-to-ggml/CMakeLists.txt +++ b/examples/convert-llama2c-to-ggml/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET convert-llama2c-to-ggml) +set(TARGET llama-convert-llama2c-to-ggml) add_executable(${TARGET} convert-llama2c-to-ggml.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/convert-llama2c-to-ggml/README.md b/examples/convert-llama2c-to-ggml/README.md index 742dcf7a3..5774ac83c 100644 --- a/examples/convert-llama2c-to-ggml/README.md +++ b/examples/convert-llama2c-to-ggml/README.md @@ -8,7 +8,7 @@ To convert the model first download the models from the [llama2.c](https://githu After successful compilation, following usage options are available: ``` -usage: ./convert-llama2c-to-ggml [options] +usage: ./llama-convert-llama2c-to-ggml [options] options: -h, --help show this help message and exit @@ -19,10 +19,10 @@ options: An example command using a model from [karpathy/tinyllamas](https://huggingface.co/karpathy/tinyllamas) is as follows: -`$ ./convert-llama2c-to-ggml --copy-vocab-from-model llama-2-7b-chat.gguf.q2_K.bin --llama2c-model stories42M.bin --llama2c-output-model stories42M.gguf.bin` +`$ ./llama-convert-llama2c-to-ggml --copy-vocab-from-model llama-2-7b-chat.gguf.q2_K.bin --llama2c-model stories42M.bin --llama2c-output-model stories42M.gguf.bin` Note: The vocabulary for `stories260K.bin` should be its own tokenizer `tok512.bin` found in [karpathy/tinyllamas/stories260K](https://huggingface.co/karpathy/tinyllamas/tree/main/stories260K). 
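A sketch of that conversion step (assuming `tok512.bin` has been downloaded next to the model; the output file name is arbitrary):

```bash
./llama-convert-llama2c-to-ggml --copy-vocab-from-model path/to/tok512.bin \
    --llama2c-model stories260K.bin --llama2c-output-model stories260K.gguf.bin
```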
Now you can use the model with a command like: -`$ ./main -m stories42M.gguf.bin -p "One day, Lily met a Shoggoth" -n 500 -c 256` +`$ ./llama-cli -m stories42M.gguf.bin -p "One day, Lily met a Shoggoth" -n 500 -c 256` diff --git a/examples/embedding/CMakeLists.txt b/examples/embedding/CMakeLists.txt index 8ffc33868..8256e789a 100644 --- a/examples/embedding/CMakeLists.txt +++ b/examples/embedding/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET embedding) +set(TARGET llama-embedding) add_executable(${TARGET} embedding.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/embedding/README.md b/examples/embedding/README.md index 6929454c5..2298ec3e7 100644 --- a/examples/embedding/README.md +++ b/examples/embedding/README.md @@ -9,13 +9,13 @@ To get started right away, run the following command, making sure to use the cor ### Unix-based systems (Linux, macOS, etc.): ```bash -./embedding -m ./path/to/model --log-disable -p "Hello World!" 2>/dev/null +./llama-embedding -m ./path/to/model --log-disable -p "Hello World!" 2>/dev/null ``` ### Windows: ```powershell -embedding.exe -m ./path/to/model --log-disable -p "Hello World!" 2>$null +llama-embedding.exe -m ./path/to/model --log-disable -p "Hello World!" 2>$null ``` The above command will output space-separated float values. diff --git a/examples/eval-callback/CMakeLists.txt b/examples/eval-callback/CMakeLists.txt index c56ba780b..a48753d38 100644 --- a/examples/eval-callback/CMakeLists.txt +++ b/examples/eval-callback/CMakeLists.txt @@ -1,9 +1,9 @@ -set(TARGET eval-callback) +set(TARGET llama-eval-callback) add_executable(${TARGET} eval-callback.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) set(TEST_TARGET test-eval-callback) -add_test(NAME ${TEST_TARGET} COMMAND eval-callback --hf-repo ggml-org/models --hf-file tinyllamas/stories260K.gguf --model stories260K.gguf --prompt hello --seed 42 -ngl 0) +add_test(NAME ${TEST_TARGET} COMMAND llama-eval-callback --hf-repo ggml-org/models --hf-file tinyllamas/stories260K.gguf --model stories260K.gguf --prompt hello --seed 42 -ngl 0) set_property(TEST ${TEST_TARGET} PROPERTY LABELS eval-callback curl) diff --git a/examples/eval-callback/README.md b/examples/eval-callback/README.md index 66a37e878..63a57ad6b 100644 --- a/examples/eval-callback/README.md +++ b/examples/eval-callback/README.md @@ -6,7 +6,7 @@ It simply prints to the console all operations and tensor data. Usage: ```shell -eval-callback \ +llama-eval-callback \ --hf-repo ggml-org/models \ --hf-file phi-2/ggml-model-q4_0.gguf \ --model phi-2-q4_0.gguf \ diff --git a/examples/export-lora/CMakeLists.txt b/examples/export-lora/CMakeLists.txt index cbbdaec67..1cef6e716 100644 --- a/examples/export-lora/CMakeLists.txt +++ b/examples/export-lora/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET export-lora) +set(TARGET llama-export-lora) add_executable(${TARGET} export-lora.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/export-lora/README.md b/examples/export-lora/README.md index 0cf3e8e45..1fb17feec 100644 --- a/examples/export-lora/README.md +++ b/examples/export-lora/README.md @@ -3,7 +3,7 @@ Apply LORA adapters to base model and export the resulting model. 
``` -usage: export-lora [options] +usage: llama-export-lora [options] options: -h, --help show this help message and exit @@ -17,7 +17,7 @@ options: For example: ```bash -./bin/export-lora \ +./bin/llama-export-lora \ -m open-llama-3b-v2-q8_0.gguf \ -o open-llama-3b-v2-q8_0-english2tokipona-chat.gguf \ -l lora-open-llama-3b-v2-q8_0-english2tokipona-chat-LATEST.bin diff --git a/examples/finetune/CMakeLists.txt b/examples/finetune/CMakeLists.txt index 2b52d21cf..64afe6ddc 100644 --- a/examples/finetune/CMakeLists.txt +++ b/examples/finetune/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET finetune) +set(TARGET llama-finetune) add_executable(${TARGET} finetune.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/finetune/README.md b/examples/finetune/README.md index 2fafd505e..a6ae64983 100644 --- a/examples/finetune/README.md +++ b/examples/finetune/README.md @@ -7,7 +7,7 @@ Basic usage instructions: wget https://raw.githubusercontent.com/brunoklein99/deep-learning-notes/master/shakespeare.txt # finetune LORA adapter -./bin/finetune \ +./bin/llama-finetune \ --model-base open-llama-3b-v2-q8_0.gguf \ --checkpoint-in chk-lora-open-llama-3b-v2-q8_0-shakespeare-LATEST.gguf \ --checkpoint-out chk-lora-open-llama-3b-v2-q8_0-shakespeare-ITERATION.gguf \ @@ -18,7 +18,7 @@ wget https://raw.githubusercontent.com/brunoklein99/deep-learning-notes/master/s --use-checkpointing # predict -./bin/main -m open-llama-3b-v2-q8_0.gguf --lora lora-open-llama-3b-v2-q8_0-shakespeare-LATEST.bin +./bin/llama-cli -m open-llama-3b-v2-q8_0.gguf --lora lora-open-llama-3b-v2-q8_0-shakespeare-LATEST.bin ``` **Only llama based models are supported!** The output files will be saved every N iterations (config with `--save-every N`). @@ -38,14 +38,14 @@ After 10 more iterations: Checkpoint files (`--checkpoint-in FN`, `--checkpoint-out FN`) store the training process. When the input checkpoint file does not exist, it will begin finetuning a new randomly initialized adapter. llama.cpp compatible LORA adapters will be saved with filename specified by `--lora-out FN`. -These LORA adapters can then be used by `main` together with the base model, like in the 'predict' example command above. +These LORA adapters can then be used by `llama-cli` together with the base model, like in the 'predict' example command above. -In `main` you can also load multiple LORA adapters, which will then be mixed together. +In `llama-cli` you can also load multiple LORA adapters, which will then be mixed together. 
For example if you have two LORA adapters `lora-open-llama-3b-v2-q8_0-shakespeare-LATEST.bin` and `lora-open-llama-3b-v2-q8_0-bible-LATEST.bin`, you can mix them together like this: ```bash -./bin/main -m open-llama-3b-v2-q8_0.gguf \ +./bin/llama-cli -m open-llama-3b-v2-q8_0.gguf \ --lora lora-open-llama-3b-v2-q8_0-shakespeare-LATEST.bin \ --lora lora-open-llama-3b-v2-q8_0-bible-LATEST.bin ``` @@ -55,7 +55,7 @@ You can change how strong each LORA adapter is applied to the base model by usin For example to apply 40% of the 'shakespeare' LORA adapter, 80% of the 'bible' LORA adapter and 100% of yet another one: ```bash -./bin/main -m open-llama-3b-v2-q8_0.gguf \ +./bin/llama-cli -m open-llama-3b-v2-q8_0.gguf \ --lora-scaled lora-open-llama-3b-v2-q8_0-shakespeare-LATEST.bin 0.4 \ --lora-scaled lora-open-llama-3b-v2-q8_0-bible-LATEST.bin 0.8 \ --lora lora-open-llama-3b-v2-q8_0-yet-another-one-LATEST.bin diff --git a/examples/finetune/finetune.sh b/examples/finetune/finetune.sh index 079bfa113..d7f2165e5 100644 --- a/examples/finetune/finetune.sh +++ b/examples/finetune/finetune.sh @@ -2,7 +2,7 @@ cd `dirname $0` cd ../.. -EXE="./finetune" +EXE="./llama-finetune" if [[ ! $LLAMA_MODEL_DIR ]]; then LLAMA_MODEL_DIR="./models"; fi if [[ ! $LLAMA_TRAINING_DIR ]]; then LLAMA_TRAINING_DIR="."; fi diff --git a/examples/gbnf-validator/CMakeLists.txt b/examples/gbnf-validator/CMakeLists.txt index 166e3ad2a..4edd6ec73 100644 --- a/examples/gbnf-validator/CMakeLists.txt +++ b/examples/gbnf-validator/CMakeLists.txt @@ -1,5 +1,5 @@ -set(TARGET gbnf-validator) +set(TARGET llama-gbnf-validator) add_executable(${TARGET} gbnf-validator.cpp) install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE common grammar-parser llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/gbnf-validator/gbnf-validator.cpp b/examples/gbnf-validator/gbnf-validator.cpp index 091069ffa..0406dc339 100644 --- a/examples/gbnf-validator/gbnf-validator.cpp +++ b/examples/gbnf-validator/gbnf-validator.cpp @@ -7,6 +7,8 @@ #include #include +#include +#include #include #include @@ -69,13 +71,14 @@ int main(int argc, char** argv) { return 1; } - fseek(grammar_file, 0, SEEK_END); - size_t grammar_size = ftell(grammar_file); - fseek(grammar_file, 0, SEEK_SET); - - std::string grammar_str(grammar_size, ' '); - fread(&grammar_str[0], 1, grammar_size, grammar_file); - fclose(grammar_file); + std::string grammar_str; + { + std::ifstream grammar_file(grammar_filename); + GGML_ASSERT(grammar_file.is_open() && "Failed to open grammar file"); + std::stringstream buffer; + buffer << grammar_file.rdbuf(); + grammar_str = buffer.str(); + } // Parse the GBNF grammar auto parsed_grammar = grammar_parser::parse(grammar_str.c_str()); @@ -100,20 +103,15 @@ int main(int argc, char** argv) { grammar_rules.size(), parsed_grammar.symbol_ids.at("root")); // Read the input file - FILE* input_file = fopen(input_filename.c_str(), "r"); - if (!input_file) { - fprintf(stdout, "Failed to open input file: %s\n", input_filename.c_str()); - return 1; + std::string input_str; + { + std::ifstream input_file(input_filename); + GGML_ASSERT(input_file.is_open() && "Failed to open input file"); + std::stringstream buffer; + buffer << input_file.rdbuf(); + input_str = buffer.str(); } - fseek(input_file, 0, SEEK_END); - size_t input_size = ftell(input_file); - fseek(input_file, 0, SEEK_SET); - - std::string input_str(input_size, 
' '); - fread(&input_str[0], 1, input_size, input_file); - fclose(input_file); - // Validate the input string against the grammar size_t error_pos; std::string error_msg; diff --git a/examples/gguf-split/CMakeLists.txt b/examples/gguf-split/CMakeLists.txt index 828e62435..f63887da7 100644 --- a/examples/gguf-split/CMakeLists.txt +++ b/examples/gguf-split/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET gguf-split) +set(TARGET llama-gguf-split) add_executable(${TARGET} gguf-split.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/gguf-split/tests.sh b/examples/gguf-split/tests.sh index 3bc0fa471..d5a92d605 100755 --- a/examples/gguf-split/tests.sh +++ b/examples/gguf-split/tests.sh @@ -18,8 +18,8 @@ fi set -x -SPLIT=$1/gguf-split -MAIN=$1/main +SPLIT=$1/llama-gguf-split +MAIN=$1/llama-cli WORK_PATH=$TMP_DIR/gguf-split ROOT_DIR=$(realpath $(dirname $0)/../../) diff --git a/examples/gguf/CMakeLists.txt b/examples/gguf/CMakeLists.txt index 6481f087b..a9569b411 100644 --- a/examples/gguf/CMakeLists.txt +++ b/examples/gguf/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET gguf) +set(TARGET llama-gguf) add_executable(${TARGET} gguf.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE ggml ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/gritlm/CMakeLists.txt b/examples/gritlm/CMakeLists.txt index ac4a5ae79..86dfddca3 100644 --- a/examples/gritlm/CMakeLists.txt +++ b/examples/gritlm/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET gritlm) +set(TARGET llama-gritlm) add_executable(${TARGET} gritlm.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/gritlm/README.md b/examples/gritlm/README.md index a3a3c1389..786ba5736 100644 --- a/examples/gritlm/README.md +++ b/examples/gritlm/README.md @@ -26,7 +26,7 @@ $ scripts/hf.sh --repo cohesionet/GritLM-7B_gguf --file gritlm-7b_q4_1.gguf --ou Run the example using the downloaded model: ```console -$ ./gritlm -m models/gritlm-7b_q4_1.gguf +$ ./llama-gritlm -m models/gritlm-7b_q4_1.gguf Cosine similarity between "Bitcoin: A Peer-to-Peer Electronic Cash System" and "A purely peer-to-peer version of electronic cash w" is: 0.605 Cosine similarity between "Bitcoin: A Peer-to-Peer Electronic Cash System" and "All text-based language problems can be reduced to" is: 0.103 diff --git a/examples/imatrix/CMakeLists.txt b/examples/imatrix/CMakeLists.txt index d688a1620..d4c8265bd 100644 --- a/examples/imatrix/CMakeLists.txt +++ b/examples/imatrix/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET imatrix) +set(TARGET llama-imatrix) add_executable(${TARGET} imatrix.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/imatrix/README.md b/examples/imatrix/README.md index 866ca9f56..38b36ee5a 100644 --- a/examples/imatrix/README.md +++ b/examples/imatrix/README.md @@ -6,7 +6,7 @@ More information is available here: https://github.com/ggerganov/llama.cpp/pull/ ## Usage ``` -./imatrix \ +./llama-imatrix \ -m model.gguf -f some-text.txt [-o imatrix.dat] [--process-output] [--verbosity 1] \ [--no-ppl] [--chunk 123] [--output-frequency 10] [--save-frequency 0] \ [--in-file imatrix-prev-0.dat --in-file imatrix-prev-1.dat ...] 
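The `--in-file` option in the synopsis lets earlier runs be merged into, or extended by, a new one; for instance (file names hypothetical):

```bash
# fold two previous importance matrices plus fresh data into a single output
./llama-imatrix -m ggml-model-f16.gguf -f more-train-data.txt \
    --in-file imatrix-prev-0.dat --in-file imatrix-prev-1.dat -o imatrix-merged.dat
```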
@@ -28,8 +28,8 @@ For faster computation, make sure to use GPU offloading via the `-ngl` argument LLAMA_CUDA=1 make -j # generate importance matrix (imatrix.dat) -./imatrix -m ggml-model-f16.gguf -f train-data.txt -ngl 99 +./llama-imatrix -m ggml-model-f16.gguf -f train-data.txt -ngl 99 # use the imatrix to perform a Q4_K_M quantization -./quantize --imatrix imatrix.dat ggml-model-f16.gguf ./ggml-model-q4_k_m.gguf q4_k_m +./llama-quantize --imatrix imatrix.dat ggml-model-f16.gguf ./ggml-model-q4_k_m.gguf q4_k_m ``` diff --git a/examples/infill/CMakeLists.txt b/examples/infill/CMakeLists.txt index e4e8028da..9b1aa3b63 100644 --- a/examples/infill/CMakeLists.txt +++ b/examples/infill/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET infill) +set(TARGET llama-infill) add_executable(${TARGET} infill.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/infill/README.md b/examples/infill/README.md index 6b076c839..74f42d2fc 100644 --- a/examples/infill/README.md +++ b/examples/infill/README.md @@ -42,5 +42,5 @@ scripts/hf.sh --repo TheBloke/CodeLlama-13B-GGUF --file codellama-13b.Q5_K_S.ggu ``` ```bash -./infill -t 10 -ngl 0 -m models/codellama-13b.Q5_K_S.gguf -c 4096 --temp 0.7 --repeat_penalty 1.1 -n 20 --in-prefix "def helloworld():\n print(\"hell" --in-suffix "\n print(\"goodbye world\")\n " +./llama-infill -t 10 -ngl 0 -m models/codellama-13b.Q5_K_S.gguf -c 4096 --temp 0.7 --repeat_penalty 1.1 -n 20 --in-prefix "def helloworld():\n print(\"hell" --in-suffix "\n print(\"goodbye world\")\n " ``` diff --git a/examples/jeopardy/jeopardy.sh b/examples/jeopardy/jeopardy.sh index 9bdbc755c..07bcb3b8d 100755 --- a/examples/jeopardy/jeopardy.sh +++ b/examples/jeopardy/jeopardy.sh @@ -21,7 +21,7 @@ counter=1 echo 'Running' while IFS= read -r question do - exe_cmd="./main -p "\"$prefix$introduction$nl$prefix$question\"" "$opts" -m ""\"$MODEL\""" >> ""\"$output_file\"" + exe_cmd="./llama-cli -p "\"$prefix$introduction$nl$prefix$question\"" "$opts" -m ""\"$MODEL\""" >> ""\"$output_file\"" echo $counter echo "Current Question: $question" eval "$exe_cmd" diff --git a/examples/json-schema-pydantic-example.py b/examples/json-schema-pydantic-example.py index 69ebfd409..cc64e572b 100644 --- a/examples/json-schema-pydantic-example.py +++ b/examples/json-schema-pydantic-example.py @@ -1,5 +1,5 @@ # Usage: -#! ./server -m some-model.gguf & +#! ./llama-server -m some-model.gguf & #! pip install pydantic #! python json-schema-pydantic-example.py diff --git a/examples/json_schema_to_grammar.py b/examples/json_schema_to_grammar.py index ab19e20df..b588497b9 100755 --- a/examples/json_schema_to_grammar.py +++ b/examples/json_schema_to_grammar.py @@ -523,7 +523,7 @@ class SchemaConverter: def main(args_in = None): parser = argparse.ArgumentParser( description=''' - Generates a grammar (suitable for use in ./main) that produces JSON conforming to a + Generates a grammar (suitable for use in ./llama-cli) that produces JSON conforming to a given JSON schema. Only a subset of JSON schema features are supported; more may be added in the future. ''', diff --git a/examples/llama-bench/README.md b/examples/llama-bench/README.md index fd95b35f4..52b0e74d3 100644 --- a/examples/llama-bench/README.md +++ b/examples/llama-bench/README.md @@ -1,4 +1,4 @@ -# llama.cpp/example/llama-bench +# llama.cpp/examples/llama-bench Performance testing tool for llama.cpp. 
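As a quick sanity check after the renames above, the benchmark tool (which already carried the `llama-` prefix, so it is untouched by this patch) can be pointed at any local model; a minimal sketch, where the model path and token counts are illustrative assumptions rather than part of this change:

```bash
# Sketch: time prompt processing (512 tokens) and generation (128 tokens).
# The model path is an assumption; point -m at any local GGUF model.
./llama-bench -m models/7B/ggml-model-q4_0.gguf -p 512 -n 128
```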
diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 2985caff8..e9fa73acb 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -30,8 +30,9 @@ if(TARGET BUILD_INFO) add_dependencies(llava BUILD_INFO) endif() -set(TARGET llava-cli) -add_executable(llava-cli llava-cli.cpp) -install(TARGETS llava-cli RUNTIME) -target_link_libraries(llava-cli PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(llava PRIVATE cxx_std_11) +set(TARGET llama-llava-cli) +add_executable(${TARGET} llava-cli.cpp) +set_target_properties(${TARGET} PROPERTIES OUTPUT_NAME llama-llava-cli) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/llava/MobileVLM-README.md b/examples/llava/MobileVLM-README.md index 74f021dec..05a8207e6 100644 --- a/examples/llava/MobileVLM-README.md +++ b/examples/llava/MobileVLM-README.md @@ -9,12 +9,12 @@ The implementation is based on llava, and is compatible with llava and mobileVLM Notice: The overall process of model inference for both **MobileVLM** and **MobileVLM_V2** models is the same, but the process of model conversion is a little different. Therefore, using **MobileVLM-1.7B** as an example, the different conversion step will be shown. ## Usage -Build with cmake or run `make llava-cli` to build it. +Build with cmake or run `make llama-llava-cli` to build it. -After building, run: `./llava-cli` to see the usage. For example: +After building, run: `./llama-llava-cli` to see the usage. For example: ```sh -./llava-cli -m MobileVLM-1.7B/ggml-model-q4_k.gguf \ +./llama-llava-cli -m MobileVLM-1.7B/ggml-model-q4_k.gguf \ --mmproj MobileVLM-1.7B/mmproj-model-f16.gguf \ --image path/to/an/image.jpg \ -p "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: \nWho is the author of this book? Answer the question using a single word or phrase. ASSISTANT:" @@ -62,7 +62,7 @@ python ./examples/convert-legacy-llama.py path/to/MobileVLM-1.7B 5. Use `quantize` to convert LLaMA part's DataType from `fp16` to `q4_k` ```sh -./quantize path/to/MobileVLM-1.7B/ggml-model-f16.gguf path/to/MobileVLM-1.7B/ggml-model-q4_k.gguf q4_k_s +./llama-quantize path/to/MobileVLM-1.7B/ggml-model-f16.gguf path/to/MobileVLM-1.7B/ggml-model-q4_k.gguf q4_k_s ``` Now both the LLaMA part and the image encoder is in the `MobileVLM-1.7B` directory. 
@@ -82,7 +82,7 @@ refer to `android/adb_run.sh`, modify resources' `name` and `path` ### case 1 **input** ```sh -/data/local/tmp/llava-cli \ +/data/local/tmp/llama-llava-cli \ -m /data/local/tmp/ggml-model-q4_k.gguf \ --mmproj /data/local/tmp/mmproj-model-f16.gguf \ -t 4 \ @@ -102,7 +102,7 @@ llama_print_timings: total time = 34731.93 ms ### case 2 **input** ```sh -/data/local/tmp/llava-cli \ +/data/local/tmp/llama-llava-cli \ -m /data/local/tmp/ggml-model-q4_k.gguf \ --mmproj /data/local/tmp/mmproj-model-f16.gguf \ -t 4 \ @@ -126,7 +126,7 @@ llama_print_timings: total time = 34570.79 ms #### llava-cli release-b2005 **input** ```sh -/data/local/tmp/llava-cli \ +/data/local/tmp/llama-llava-cli \ -m /data/local/tmp/ggml-model-q4_k.gguf \ --mmproj /data/local/tmp/mmproj-model-f16.gguf \ -t 4 \ @@ -200,7 +200,7 @@ make LLAMA_CUDA=1 CUDA_DOCKER_ARCH=sm_87 LLAMA_CUDA_F16=1 -j 32 ### case 1 **input** ```sh -./llava-cli \ +./llama-llava-cli \ -m /data/local/tmp/ggml-model-q4_k.gguf \ --mmproj /data/local/tmp/mmproj-model-f16.gguf \ --image /data/local/tmp/demo.jpeg \ @@ -224,7 +224,7 @@ llama_print_timings: total time = 1352.63 ms / 252 tokens ### case 2 **input** ```sh -./llava-cli \ +./llama-llava-cli \ -m /data/local/tmp/ggml-model-q4_k.gguf \ --mmproj /data/local/tmp/mmproj-model-f16.gguf \ -p "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: \nWhat is in the image? ASSISTANT:" \ diff --git a/examples/llava/README.md b/examples/llava/README.md index 8d1ae5270..f4554de67 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -11,12 +11,12 @@ For llava-1.6 a variety of prepared gguf models are available as well [7b-34b](h After API is confirmed, more models will be supported / uploaded. ## Usage -Build with cmake or run `make llava-cli` to build it. +Build with cmake or run `make llama-llava-cli` to build it. -After building, run: `./llava-cli` to see the usage. For example: +After building, run: `./llama-llava-cli` to see the usage. For example: ```sh -./llava-cli -m ../llava-v1.5-7b/ggml-model-f16.gguf --mmproj ../llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg +./llama-llava-cli -m ../llava-v1.5-7b/ggml-model-f16.gguf --mmproj ../llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg ``` **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. @@ -95,9 +95,9 @@ python ./examples/llava/convert-image-encoder-to-gguf.py -m vit --llava-projecto python ./examples/convert-legacy-llama.py ../llava-v1.6-vicuna-7b/ --skip-unknown ``` -7) And finally we can run the llava-cli using the 1.6 model version: +7) And finally we can run the llava cli using the 1.6 model version: ```console -./llava-cli -m ../llava-v1.6-vicuna-7b/ggml-model-f16.gguf --mmproj vit/mmproj-model-f16.gguf --image some-image.jpg -c 4096 +./llama-llava-cli -m ../llava-v1.6-vicuna-7b/ggml-model-f16.gguf --mmproj vit/mmproj-model-f16.gguf --image some-image.jpg -c 4096 ``` **note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) diff --git a/examples/llava/android/adb_run.sh b/examples/llava/android/adb_run.sh index f73623ae3..45ccf8d70 100755 --- a/examples/llava/android/adb_run.sh +++ b/examples/llava/android/adb_run.sh @@ -10,7 +10,7 @@ prompt="A chat between a curious user and an artificial intelligence assistant. 
# prompt="A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: \nWhat is in the image? ASSISTANT:" program_dir="build_64/bin" -binName="llava-cli" +binName="llama-llava-cli" n_threads=4 diff --git a/examples/lookahead/CMakeLists.txt b/examples/lookahead/CMakeLists.txt index 8827e3f11..f0ae5cd89 100644 --- a/examples/lookahead/CMakeLists.txt +++ b/examples/lookahead/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET lookahead) +set(TARGET llama-lookahead) add_executable(${TARGET} lookahead.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/lookup/CMakeLists.txt b/examples/lookup/CMakeLists.txt index b91633f63..ef19fe25e 100644 --- a/examples/lookup/CMakeLists.txt +++ b/examples/lookup/CMakeLists.txt @@ -1,22 +1,22 @@ -set(TARGET lookup) +set(TARGET llama-lookup) add_executable(${TARGET} lookup.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -set(TARGET lookup-create) +set(TARGET llama-lookup-create) add_executable(${TARGET} lookup-create.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -set(TARGET lookup-merge) +set(TARGET llama-lookup-merge) add_executable(${TARGET} lookup-merge.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -set(TARGET lookup-stats) +set(TARGET llama-lookup-stats) add_executable(${TARGET} lookup-stats.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/lookup/lookup-merge.cpp b/examples/lookup/lookup-merge.cpp index 07c93eb8d..81e2b0436 100644 --- a/examples/lookup/lookup-merge.cpp +++ b/examples/lookup/lookup-merge.cpp @@ -11,14 +11,14 @@ #include #include -static void print_usage() { +static void print_usage(char* argv0) { fprintf(stderr, "Merges multiple lookup cache files into a single one.\n"); - fprintf(stderr, "Usage: lookup-merge [--help] lookup_part_1.bin lookup_part_2.bin ... lookup_merged.bin\n"); + fprintf(stderr, "Usage: %s [--help] lookup_part_1.bin lookup_part_2.bin ... lookup_merged.bin\n", argv0); } int main(int argc, char ** argv){ if (argc < 3) { - print_usage(); + print_usage(argv[0]); exit(1); } @@ -27,7 +27,7 @@ int main(int argc, char ** argv){ for (int i = 0; i < argc-1; ++i) { args[i] = argv[i+1]; if (args[i] == "-h" || args[i] == "--help") { - print_usage(); + print_usage(argv[0]); exit(0); } } diff --git a/examples/main-cmake-pkg/CMakeLists.txt b/examples/main-cmake-pkg/CMakeLists.txt index deb77d588..a97ded365 100644 --- a/examples/main-cmake-pkg/CMakeLists.txt +++ b/examples/main-cmake-pkg/CMakeLists.txt @@ -1,12 +1,12 @@ cmake_minimum_required(VERSION 3.12) -project("main-cmake-pkg" C CXX) -set(TARGET main-cmake-pkg) +project("llama-cli-cmake-pkg" C CXX) +set(TARGET llama-cli-cmake-pkg) find_package(Llama 0.0.1 REQUIRED) # Bake common functionality in with target. Because applications # using the relocatable Llama package should be outside of the -# source tree, main-cmake-pkg pretends the dependencies are built-in. +# source tree, llama-cli-cmake-pkg pretends the dependencies are built-in. 
set(_common_path "${CMAKE_CURRENT_LIST_DIR}/../../common") add_library(common OBJECT) file(GLOB _common_files @@ -15,7 +15,7 @@ file(GLOB _common_files ) target_sources(common PRIVATE ${_common_files}) -# If the common project was part of "main-cmake-pkg" the transient +# If the common project was part of "llama-cli-cmake-pkg" the transient # defines would automatically be attached. Because the common func- # tionality is separate, but dependent upon the defines, it must be # explicitly extracted from the "llama" target. diff --git a/examples/main-cmake-pkg/README.md b/examples/main-cmake-pkg/README.md index a88e92f23..08d83dd08 100644 --- a/examples/main-cmake-pkg/README.md +++ b/examples/main-cmake-pkg/README.md @@ -1,6 +1,6 @@ # llama.cpp/example/main-cmake-pkg -This program builds the [main](../main) application using a relocatable CMake package. It serves as an example of using the `find_package()` CMake command to conveniently include [llama.cpp](https://github.com/ggerganov/llama.cpp) in projects which live outside of the source tree. +This program builds [llama-cli](../main) using a relocatable CMake package. It serves as an example of using the `find_package()` CMake command to conveniently include [llama.cpp](https://github.com/ggerganov/llama.cpp) in projects which live outside of the source tree. ## Building @@ -20,7 +20,7 @@ cmake --build build --config Release cmake --install build --prefix C:/LlamaCPP ``` -### Build main-cmake-pkg +### Build llama-cli-cmake-pkg ```cmd diff --git a/examples/main/CMakeLists.txt b/examples/main/CMakeLists.txt index d532980b7..5f6efaa9a 100644 --- a/examples/main/CMakeLists.txt +++ b/examples/main/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET main) +set(TARGET llama-cli) add_executable(${TARGET} main.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/main/README.md b/examples/main/README.md index cdc002f15..61e4a42f7 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -1,4 +1,4 @@ -# llama.cpp/example/main +# llama.cpp/examples/main This example program allows you to use various LLaMA language models in an easy and efficient way. It is specifically designed to work with the [llama.cpp](https://github.com/ggerganov/llama.cpp) project, which provides a plain C/C++ implementation with optional 4-bit quantization support for faster, lower memory inference, and is optimized for desktop CPUs. This program can be used to perform various inference tasks with LLaMA models, including generating text based on user-provided prompts and chat-like interactions with reverse prompts. @@ -20,13 +20,13 @@ To get started right away, run the following command, making sure to use the cor #### Unix-based systems (Linux, macOS, etc.): ```bash -./main -m models/7B/ggml-model.bin --prompt "Once upon a time" +./llama-cli -m models/7B/ggml-model.bin --prompt "Once upon a time" ``` #### Windows: ```powershell -main.exe -m models\7B\ggml-model.bin --prompt "Once upon a time" +llama-cli.exe -m models\7B\ggml-model.bin --prompt "Once upon a time" ``` For an interactive experience, try this command: @@ -34,7 +34,7 @@ For an interactive experience, try this command: #### Unix-based systems (Linux, macOS, etc.): ```bash -./main -m models/7B/ggml-model.bin -n -1 --color -r "User:" --in-prefix " " -i -p \ +./llama-cli -m models/7B/ggml-model.bin -n -1 --color -r "User:" --in-prefix " " -i -p \ 'User: Hi AI: Hello. I am an AI chatbot. Would you like to talk? User: Sure! 
@@ -45,7 +45,7 @@ User:' #### Windows: ```powershell -main.exe -m models\7B\ggml-model.bin -n -1 --color -r "User:" --in-prefix " " -i -e -p "User: Hi\nAI: Hello. I am an AI chatbot. Would you like to talk?\nUser: Sure!\nAI: What would you like to talk about?\nUser:" +llama-cli.exe -m models\7B\ggml-model.bin -n -1 --color -r "User:" --in-prefix " " -i -e -p "User: Hi\nAI: Hello. I am an AI chatbot. Would you like to talk?\nUser: Sure!\nAI: What would you like to talk about?\nUser:" ``` The following command generates "infinite" text from a starting prompt (you can use `Ctrl-C` to stop it): @@ -53,18 +53,18 @@ The following command generates "infinite" text from a starting prompt (you can #### Unix-based systems (Linux, macOS, etc.): ```bash -./main -m models/7B/ggml-model.bin --ignore-eos -n -1 +./llama-cli -m models/7B/ggml-model.bin --ignore-eos -n -1 ``` #### Windows: ```powershell -main.exe -m models\7B\ggml-model.bin --ignore-eos -n -1 +llama-cli.exe -m models\7B\ggml-model.bin --ignore-eos -n -1 ``` ## Common Options -In this section, we cover the most commonly used options for running the `main` program with the LLaMA models: +In this section, we cover the most commonly used options for running the `llama-cli` program with the LLaMA models: - `-m FNAME, --model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.gguf`; inferred from `--model-url` if set). - `-mu MODEL_URL --model-url MODEL_URL`: Specify a remote http url to download the file (e.g https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf). @@ -74,7 +74,7 @@ In this section, we cover the most commonly used options for running the `main` ## Input Prompts -The `main` program provides several ways to interact with the LLaMA models using input prompts: +The `llama-cli` program provides several ways to interact with the LLaMA models using input prompts: - `--prompt PROMPT`: Provide a prompt directly as a command-line option. - `--file FNAME`: Provide a file containing a prompt or multiple prompts. @@ -82,7 +82,7 @@ The `main` program provides several ways to interact with the LLaMA models using ## Interaction -The `main` program offers a seamless way to interact with LLaMA models, allowing users to engage in real-time conversations or provide instructions for specific tasks. The interactive mode can be triggered using various options, including `--interactive` and `--interactive-first`. +The `llama-cli` program offers a seamless way to interact with LLaMA models, allowing users to engage in real-time conversations or provide instructions for specific tasks. The interactive mode can be triggered using various options, including `--interactive` and `--interactive-first`. In interactive mode, users can participate in text generation by injecting their input during the process. Users can press `Ctrl+C` at any time to interject and type their input, followed by pressing `Return` to submit it to the LLaMA model. To submit additional lines without finalizing input, users can end the current line with a backslash (`\`) and continue typing. @@ -107,7 +107,7 @@ To overcome this limitation, you can use the `--in-prefix` flag to add a space o The `--in-prefix` flag is used to add a prefix to your input, primarily, this is used to insert a space after the reverse prompt. 
Here's an example of how to use the `--in-prefix` flag in conjunction with the `--reverse-prompt` flag: ```sh -./main -r "User:" --in-prefix " " +./llama-cli -r "User:" --in-prefix " " ``` ### In-Suffix @@ -115,7 +115,7 @@ The `--in-prefix` flag is used to add a prefix to your input, primarily, this is The `--in-suffix` flag is used to add a suffix after your input. This is useful for adding an "Assistant:" prompt after the user's input. It's added after the new-line character (`\n`) that's automatically added to the end of the user's input. Here's an example of how to use the `--in-suffix` flag in conjunction with the `--reverse-prompt` flag: ```sh -./main -r "User:" --in-prefix " " --in-suffix "Assistant:" +./llama-cli -r "User:" --in-prefix " " --in-suffix "Assistant:" ``` ## Context Management diff --git a/examples/parallel/CMakeLists.txt b/examples/parallel/CMakeLists.txt index 319535a6e..c13557bac 100644 --- a/examples/parallel/CMakeLists.txt +++ b/examples/parallel/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET parallel) +set(TARGET llama-parallel) add_executable(${TARGET} parallel.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/passkey/CMakeLists.txt b/examples/passkey/CMakeLists.txt index 3161bf3ef..dc467a5d3 100644 --- a/examples/passkey/CMakeLists.txt +++ b/examples/passkey/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET passkey) +set(TARGET llama-passkey) add_executable(${TARGET} passkey.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/passkey/README.md b/examples/passkey/README.md index 9e7a119ba..a48a6283a 100644 --- a/examples/passkey/README.md +++ b/examples/passkey/README.md @@ -8,5 +8,5 @@ See the following PRs for more info: ### Usage ```bash -make -j && ./passkey -m ./models/llama-7b-v2/ggml-model-f16.gguf --junk 250 +make -j && ./llama-passkey -m ./models/llama-7b-v2/ggml-model-f16.gguf --junk 250 ``` diff --git a/examples/perplexity/CMakeLists.txt b/examples/perplexity/CMakeLists.txt index 3c76d3221..be0f2fd02 100644 --- a/examples/perplexity/CMakeLists.txt +++ b/examples/perplexity/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET perplexity) +set(TARGET llama-perplexity) add_executable(${TARGET} perplexity.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 0bd78c21a..efde8dfdf 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -476,7 +476,7 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par } // Download: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip - // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` + // Run `./llama-perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` // Output: `perplexity: 13.5106 [114/114]` // BOS tokens will be added for each chunk before eval diff --git a/examples/quantize-stats/CMakeLists.txt b/examples/quantize-stats/CMakeLists.txt index e31cf5e38..bb986a716 100644 --- a/examples/quantize-stats/CMakeLists.txt +++ b/examples/quantize-stats/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET quantize-stats) +set(TARGET llama-quantize-stats) add_executable(${TARGET} quantize-stats.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama build_info 
${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/quantize/CMakeLists.txt b/examples/quantize/CMakeLists.txt index 6b977fde8..3ee4eb971 100644 --- a/examples/quantize/CMakeLists.txt +++ b/examples/quantize/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET quantize) +set(TARGET llama-quantize) add_executable(${TARGET} quantize.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama common ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/quantize/tests.sh b/examples/quantize/tests.sh index 38e28ffc3..24bc970e8 100644 --- a/examples/quantize/tests.sh +++ b/examples/quantize/tests.sh @@ -18,9 +18,9 @@ fi set -x -SPLIT=$1/gguf-split -QUANTIZE=$1/quantize -MAIN=$1/main +SPLIT=$1/llama-gguf-split +QUANTIZE=$1/llama-quantize +MAIN=$1/llama-cli WORK_PATH=$TMP_DIR/quantize ROOT_DIR=$(realpath $(dirname $0)/../../) diff --git a/examples/reason-act.sh b/examples/reason-act.sh index 046c48db5..06d592799 100755 --- a/examples/reason-act.sh +++ b/examples/reason-act.sh @@ -8,7 +8,7 @@ if [ "$1" == "-m" ]; then MODEL="-m $2 " fi -./main $MODEL --color \ +./llama-cli $MODEL --color \ -f ./prompts/reason-act.txt \ -i --interactive-first \ --top_k 10000 --temp 0.2 --repeat_penalty 1 -t 7 -c 2048 \ diff --git a/examples/retrieval/CMakeLists.txt b/examples/retrieval/CMakeLists.txt index eaabae08d..66610f311 100644 --- a/examples/retrieval/CMakeLists.txt +++ b/examples/retrieval/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET retrieval) +set(TARGET llama-retrieval) add_executable(${TARGET} retrieval.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/retrieval/README.md b/examples/retrieval/README.md index 2b2595c46..bc5f22e2f 100644 --- a/examples/retrieval/README.md +++ b/examples/retrieval/README.md @@ -15,7 +15,7 @@ https://github.com/ggerganov/llama.cpp/pull/6193 `retrieval` example can be tested as follows: ```bash -make -j && ./retrieval --model ./models/bge-base-en-v1.5-f16.gguf --top-k 3 --context-file README.md --context-file License --chunk-size 100 --chunk-separator . +make -j && ./llama-retrieval --model ./models/bge-base-en-v1.5-f16.gguf --top-k 3 --context-file README.md --context-file License --chunk-size 100 --chunk-separator . ``` This chunks and embeds all given files and starts a loop requesting query inputs: diff --git a/examples/rpc/README.md b/examples/rpc/README.md index eeec71a8e..86544e3fe 100644 --- a/examples/rpc/README.md +++ b/examples/rpc/README.md @@ -70,5 +70,5 @@ cmake --build . 
--config Release Finally, use the `--rpc` option to specify the host and port of each `rpc-server`: ```bash -$ bin/main -m ../models/tinyllama-1b/ggml-model-f16.gguf -p "Hello, my name is" --repeat-penalty 1.0 -n 64 --rpc 192.168.88.10:50052,192.168.88.11:50052 -ngl 99 +$ bin/llama-cli -m ../models/tinyllama-1b/ggml-model-f16.gguf -p "Hello, my name is" --repeat-penalty 1.0 -n 64 --rpc 192.168.88.10:50052,192.168.88.11:50052 -ngl 99 ``` diff --git a/examples/save-load-state/CMakeLists.txt b/examples/save-load-state/CMakeLists.txt index cc6ed8554..0fb5e359b 100644 --- a/examples/save-load-state/CMakeLists.txt +++ b/examples/save-load-state/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET save-load-state) +set(TARGET llama-save-load-state) add_executable(${TARGET} save-load-state.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/server-llama2-13B.sh b/examples/server-llama2-13B.sh index 17fedc2b1..4ce79b7fa 100755 --- a/examples/server-llama2-13B.sh +++ b/examples/server-llama2-13B.sh @@ -16,7 +16,7 @@ GEN_OPTIONS="${GEN_OPTIONS:---ctx_size 4096 --batch-size 1024}" # shellcheck disable=SC2086 # Intended splitting of GEN_OPTIONS -./server $GEN_OPTIONS \ +./llama-server $GEN_OPTIONS \ --model "$MODEL" \ --threads "$N_THREAD" \ --rope-freq-scale 1.0 \ diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index dab709619..8365f9510 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET server) +set(TARGET llama-server) option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON) option(LLAMA_SERVER_SSL "Build SSL support for the server" OFF) include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) diff --git a/examples/server/README.md b/examples/server/README.md index ccbdcdbdb..e7fb0bf64 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -80,26 +80,26 @@ The project is under active development, and we are [looking for feedback and co ## Build -`server` is built alongside everything else from the root of the project +`llama-server` is built alongside everything else from the root of the project - Using `make`: ```bash - make server + make llama-server ``` - Using `CMake`: ```bash cmake -B build - cmake --build build --config Release -t server + cmake --build build --config Release -t llama-server ``` - Binary is at `./build/bin/server` + Binary is at `./build/bin/llama-server` ## Build with SSL -`server` can also be built with SSL support using OpenSSL 3 +`llama-server` can also be built with SSL support using OpenSSL 3 - Using `make`: @@ -107,14 +107,14 @@ The project is under active development, and we are [looking for feedback and co # NOTE: For non-system openssl, use the following: # CXXFLAGS="-I /path/to/openssl/include" # LDFLAGS="-L /path/to/openssl/lib" - make LLAMA_SERVER_SSL=true server + make LLAMA_SERVER_SSL=true llama-server ``` - Using `CMake`: ```bash cmake -B build -DLLAMA_SERVER_SSL=ON - cmake --build build --config Release -t server + cmake --build build --config Release -t llama-server ``` ## Quick Start @@ -124,13 +124,13 @@ To get started right away, run the following command, making sure to use the cor ### Unix-based systems (Linux, macOS, etc.) 
```bash -./server -m models/7B/ggml-model.gguf -c 2048 +./llama-server -m models/7B/ggml-model.gguf -c 2048 ``` ### Windows ```powershell -server.exe -m models\7B\ggml-model.gguf -c 2048 +llama-server.exe -m models\7B\ggml-model.gguf -c 2048 ``` The above command will start a server that by default listens on `127.0.0.1:8080`. @@ -629,11 +629,11 @@ bash chat.sh ### OAI-like API -The HTTP `server` supports an OAI-like API: https://github.com/openai/openai-openapi +The HTTP `llama-server` supports an OAI-like API: https://github.com/openai/openai-openapi ### API errors -`server` returns errors in the same format as OAI: https://github.com/openai/openai-openapi +`llama-server` returns errors in the same format as OAI: https://github.com/openai/openai-openapi Example of an error: diff --git a/examples/server/bench/README.md b/examples/server/bench/README.md index 23a3ec975..0f18ca396 100644 --- a/examples/server/bench/README.md +++ b/examples/server/bench/README.md @@ -99,7 +99,7 @@ The `bench.py` script does several steps: It aims to be used in the CI, but you can run it manually: ```shell -LLAMA_SERVER_BIN_PATH=../../../cmake-build-release/bin/server python bench.py \ +LLAMA_SERVER_BIN_PATH=../../../cmake-build-release/bin/llama-server python bench.py \ --runner-label local \ --name local \ --branch `git rev-parse --abbrev-ref HEAD` \ diff --git a/examples/server/bench/bench.py b/examples/server/bench/bench.py index 86c5de101..4fbbb2032 100644 --- a/examples/server/bench/bench.py +++ b/examples/server/bench/bench.py @@ -245,7 +245,7 @@ def start_server(args): def start_server_background(args): # Start the server - server_path = '../../../build/bin/server' + server_path = '../../../build/bin/llama-server' if 'LLAMA_SERVER_BIN_PATH' in os.environ: server_path = os.environ['LLAMA_SERVER_BIN_PATH'] server_args = [ diff --git a/examples/server/public_simplechat/readme.md b/examples/server/public_simplechat/readme.md index 36a46885d..2dc177825 100644 --- a/examples/server/public_simplechat/readme.md +++ b/examples/server/public_simplechat/readme.md @@ -44,12 +44,12 @@ http module. ### running using examples/server -bin/server -m path/model.gguf --path ../examples/server/public_simplechat [--port PORT] +./llama-server -m path/model.gguf --path examples/server/public_simplechat [--port PORT] ### running using python3's server module first run examples/server -* bin/server -m path/model.gguf +* ./llama-server -m path/model.gguf next run this web front end in examples/server/public_simplechat * cd ../examples/server/public_simplechat diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md index 83c0208f3..5e6cb277b 100644 --- a/examples/server/tests/README.md +++ b/examples/server/tests/README.md @@ -27,10 +27,8 @@ To mitigate it, you can increase values in `n_predict`, `kv_size`. ```shell cd ../../.. -mkdir build -cd build -cmake -DLLAMA_CURL=ON ../ -cmake --build . --target server +cmake -B build -DLLAMA_CURL=ON +cmake --build build --target llama-server ``` 2. 
Start the test: `./tests.sh` @@ -40,7 +38,7 @@ It's possible to override some scenario steps values with environment variables: | variable | description | |--------------------------|------------------------------------------------------------------------------------------------| | `PORT` | `context.server_port` to set the listening port of the server during scenario, default: `8080` | -| `LLAMA_SERVER_BIN_PATH` | to change the server binary path, default: `../../../build/bin/server` | +| `LLAMA_SERVER_BIN_PATH` | to change the server binary path, default: `../../../build/bin/llama-server` | | `DEBUG` | "ON" to enable steps and server verbose mode `--verbose` | | `SERVER_LOG_FORMAT_JSON` | if set switch server logs to json format | | `N_GPU_LAYERS` | number of model layers to offload to VRAM `-ngl --n-gpu-layers` | diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 26d9359d7..7b5dabb01 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -1272,9 +1272,9 @@ def context_text(context): def start_server_background(context): if os.name == 'nt': - context.server_path = '../../../build/bin/Release/server.exe' + context.server_path = '../../../build/bin/Release/llama-server.exe' else: - context.server_path = '../../../build/bin/server' + context.server_path = '../../../build/bin/llama-server' if 'LLAMA_SERVER_BIN_PATH' in os.environ: context.server_path = os.environ['LLAMA_SERVER_BIN_PATH'] server_listen_addr = context.server_fqdn diff --git a/examples/simple/CMakeLists.txt b/examples/simple/CMakeLists.txt index 7da5ff6f3..070cfbe7a 100644 --- a/examples/simple/CMakeLists.txt +++ b/examples/simple/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET simple) +set(TARGET llama-simple) add_executable(${TARGET} simple.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/speculative/CMakeLists.txt b/examples/speculative/CMakeLists.txt index 810f3c46a..aa208e7aa 100644 --- a/examples/speculative/CMakeLists.txt +++ b/examples/speculative/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET speculative) +set(TARGET llama-speculative) add_executable(${TARGET} speculative.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/sycl/CMakeLists.txt b/examples/sycl/CMakeLists.txt index 69cf8932e..e4d5083e6 100644 --- a/examples/sycl/CMakeLists.txt +++ b/examples/sycl/CMakeLists.txt @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: MIT -set(TARGET ls-sycl-device) +set(TARGET llama-ls-sycl-device) add_executable(${TARGET} ls-sycl-device.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/sycl/README.md b/examples/sycl/README.md index c589c2d3a..0e3acd35b 100644 --- a/examples/sycl/README.md +++ b/examples/sycl/README.md @@ -6,9 +6,9 @@ This example program provides the tools for llama.cpp for SYCL on Intel GPU. |Tool Name| Function|Status| |-|-|-| -|ls-sycl-device| List all SYCL devices with ID, compute capability, max work group size, ect.|Support| +|llama-ls-sycl-device| List all SYCL devices with ID, compute capability, max work group size, ect.|Support| -### ls-sycl-device +### llama-ls-sycl-device List all SYCL devices with ID, compute capability, max work group size, ect. 
@@ -23,7 +23,7 @@ source /opt/intel/oneapi/setvars.sh 3. Execute ``` -./build/bin/ls-sycl-device +./build/bin/llama-ls-sycl-device ``` Check the ID in startup log, like: diff --git a/examples/sycl/run-llama2.sh b/examples/sycl/run-llama2.sh index 7b39a18c0..da0e4aaba 100755 --- a/examples/sycl/run-llama2.sh +++ b/examples/sycl/run-llama2.sh @@ -23,15 +23,15 @@ fi if [ $GGML_SYCL_SINGLE_GPU -eq 1 ]; then echo "use $GGML_SYCL_DEVICE as main GPU" #use signle GPU only - ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 -mg $GGML_SYCL_DEVICE -sm none + ZES_ENABLE_SYSMAN=1 ./build/bin/llama-cli -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 -mg $GGML_SYCL_DEVICE -sm none else #use multiple GPUs with same max compute units - ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 + ZES_ENABLE_SYSMAN=1 ./build/bin/llama-cli -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 fi #use main GPU only -#ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 -mg $GGML_SYCL_DEVICE -sm none +#ZES_ENABLE_SYSMAN=1 ./build/bin/llama-cli -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 -mg $GGML_SYCL_DEVICE -sm none #use multiple GPUs with same max compute units -#ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 +#ZES_ENABLE_SYSMAN=1 ./build/bin/llama-cli -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 diff --git a/examples/tokenize/CMakeLists.txt b/examples/tokenize/CMakeLists.txt index 5e6654d7e..b704dcae1 100644 --- a/examples/tokenize/CMakeLists.txt +++ b/examples/tokenize/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET tokenize) +set(TARGET llama-tokenize) add_executable(${TARGET} tokenize.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/train-text-from-scratch/CMakeLists.txt b/examples/train-text-from-scratch/CMakeLists.txt index 4459516d0..9a1d2a35e 100644 --- a/examples/train-text-from-scratch/CMakeLists.txt +++ b/examples/train-text-from-scratch/CMakeLists.txt @@ -1,4 +1,4 @@ -set(TARGET train-text-from-scratch) +set(TARGET llama-train-text-from-scratch) add_executable(${TARGET} train-text-from-scratch.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) diff --git a/examples/train-text-from-scratch/README.md b/examples/train-text-from-scratch/README.md index 1b3454069..3abae2380 100644 --- a/examples/train-text-from-scratch/README.md +++ b/examples/train-text-from-scratch/README.md @@ -7,7 +7,7 @@ Basic usage instructions: wget https://raw.githubusercontent.com/brunoklein99/deep-learning-notes/master/shakespeare.txt # train -./bin/train-text-from-scratch \ +./bin/llama-train-text-from-scratch \ --vocab-model ../models/ggml-vocab-llama.gguf \ --ctx 64 --embd 256 --head 8 --layer 16 \ --checkpoint-in chk-shakespeare-256x16-LATEST.gguf \ @@ -18,7 +18,7 @@ wget https://raw.githubusercontent.com/brunoklein99/deep-learning-notes/master/s --no-checkpointing # predict -./bin/main -m ggml-shakespeare-256x16-f32.gguf +./bin/llama-cli -m ggml-shakespeare-256x16-f32.gguf ``` Output files will be saved every N iterations (config with `--save-every N`). 
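Because checkpoints are written every N iterations, an interrupted run can be resumed by feeding the latest checkpoint back in via `--checkpoint-in`; a minimal sketch under the new binary name, reusing the file names from the training example above (the abbreviated flag set here is an assumption, not the full training command):

```bash
# Sketch: resume training from the latest checkpoint under the renamed binary.
# --save-every controls how often fresh checkpoints are written.
./bin/llama-train-text-from-scratch \
    --vocab-model ../models/ggml-vocab-llama.gguf \
    --checkpoint-in chk-shakespeare-256x16-LATEST.gguf \
    --train-data "shakespeare.txt" \
    --save-every 10
```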
diff --git a/flake.nix b/flake.nix index 0a52ea52e..c69637d11 100644 --- a/flake.nix +++ b/flake.nix @@ -63,7 +63,7 @@ # nix-repl> :lf github:ggerganov/llama.cpp # Added 13 variables. # nix-repl> outputs.apps.x86_64-linux.quantize - # { program = "/nix/store/00000000000000000000000000000000-llama.cpp/bin/quantize"; type = "app"; } + # { program = "/nix/store/00000000000000000000000000000000-llama.cpp/bin/llama-quantize"; type = "app"; } # ``` outputs = { self, flake-parts, ... }@inputs: diff --git a/grammars/README.md b/grammars/README.md index 2ec21a4c0..2f685eb6d 100644 --- a/grammars/README.md +++ b/grammars/README.md @@ -91,7 +91,7 @@ item ::= [^\n]+ "\n" This guide provides a brief overview. Check out the GBNF files in this directory (`grammars/`) for examples of full grammars. You can try them out with: ``` -./main -m --grammar-file grammars/some-grammar.gbnf -p 'Some prompt' +./llama-cli -m --grammar-file grammars/some-grammar.gbnf -p 'Some prompt' ``` `llama.cpp` can also convert JSON schemas to grammars either ahead of time or at each request, see below. @@ -110,20 +110,20 @@ While semantically correct, the syntax `x? x? x?.... x?` (with N repetitions) ma You can use GBNF grammars: -- In the [server](../examples/server)'s completion endpoints, passed as the `grammar` body field -- In the [main](../examples/main) CLI, passed as the `--grammar` & `--grammar-file` flags -- With the [gbnf-validator](../examples/gbnf-validator) tool, to test them against strings. +- In [llama-server](../examples/server)'s completion endpoints, passed as the `grammar` body field +- In [llama-cli](../examples/main), passed as the `--grammar` & `--grammar-file` flags +- With [llama-gbnf-validator](../examples/gbnf-validator) tool, to test them against strings. ## JSON Schemas → GBNF `llama.cpp` supports converting a subset of https://json-schema.org/ to GBNF grammars: -- In the [server](../examples/server): +- In [llama-server](../examples/server): - For any completion endpoints, passed as the `json_schema` body field - For the `/chat/completions` endpoint, passed inside the `result_format` body field (e.g. `{"type", "json_object", "schema": {"items": {}}}`) -- In the [main](../examples/main) CLI, passed as the `--json` / `-j` flag +- In [llama-cli](../examples/main), passed as the `--json` / `-j` flag - To convert to a grammar ahead of time: - - in CLI, with [json_schema_to_grammar.py](../examples/json_schema_to_grammar.py) + - in CLI, with [examples/json_schema_to_grammar.py](../examples/json_schema_to_grammar.py) - in JavaScript with [json-schema-to-grammar.mjs](../examples/server/public/json-schema-to-grammar.mjs) (this is used by the [server](../examples/server)'s Web UI) Take a look at [tests](../../tests/test-json-schema-to-grammar.cpp) to see which features are likely supported (you'll also find usage examples in https://github.com/ggerganov/llama.cpp/pull/5978, https://github.com/ggerganov/llama.cpp/pull/6659 & https://github.com/ggerganov/llama.cpp/pull/6555). 
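To make the ahead-of-time conversion route concrete, here is a minimal sketch combining the converter script with the renamed CLI; the schema file name, model path, and prompt are illustrative assumptions:

```bash
# Sketch: convert a JSON schema to a GBNF grammar once, then constrain llama-cli with it.
# answer.schema.json is a hypothetical schema file; the model path is illustrative.
python examples/json_schema_to_grammar.py answer.schema.json > answer.gbnf
./llama-cli -m model.gguf --grammar-file answer.gbnf -p 'Respond in JSON: '
```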
diff --git a/pocs/vdot/CMakeLists.txt b/pocs/vdot/CMakeLists.txt index fb89a1cd4..d5405ad29 100644 --- a/pocs/vdot/CMakeLists.txt +++ b/pocs/vdot/CMakeLists.txt @@ -1,9 +1,9 @@ -set(TARGET vdot) +set(TARGET llama-vdot) add_executable(${TARGET} vdot.cpp) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -set(TARGET q8dot) +set(TARGET llama-q8dot) add_executable(${TARGET} q8dot.cpp) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/scripts/get-hellaswag.sh b/scripts/get-hellaswag.sh index 121979fe2..4e1b1cc15 100755 --- a/scripts/get-hellaswag.sh +++ b/scripts/get-hellaswag.sh @@ -4,7 +4,7 @@ wget https://raw.githubusercontent.com/klosax/hellaswag_text_data/main/hellaswag echo "Usage:" echo "" -echo " ./perplexity -m model.gguf -f hellaswag_val_full.txt --hellaswag [--hellaswag-tasks N] [other params]" +echo " ./llama-perplexity -m model.gguf -f hellaswag_val_full.txt --hellaswag [--hellaswag-tasks N] [other params]" echo "" exit 0 diff --git a/scripts/get-wikitext-103.sh b/scripts/get-wikitext-103.sh index 880dd5cbe..9c65fafbc 100755 --- a/scripts/get-wikitext-103.sh +++ b/scripts/get-wikitext-103.sh @@ -4,7 +4,7 @@ wget https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-103-raw-v1. echo "Usage:" echo "" -echo " ./perplexity -m model.gguf -f wiki.test.raw [other params]" +echo " ./llama-perplexity -m model.gguf -f wiki.test.raw [other params]" echo "" exit 0 diff --git a/scripts/get-wikitext-2.sh b/scripts/get-wikitext-2.sh index b01476a46..5f3845ef5 100755 --- a/scripts/get-wikitext-2.sh +++ b/scripts/get-wikitext-2.sh @@ -5,7 +5,7 @@ unzip wikitext-2-raw-v1.zip echo "Usage:" echo "" -echo " ./perplexity -m model.gguf -f wikitext-2-raw/wiki.test.raw [other params]" +echo " ./llama-perplexity -m model.gguf -f wikitext-2-raw/wiki.test.raw [other params]" echo "" exit 0 diff --git a/scripts/get-winogrande.sh b/scripts/get-winogrande.sh index 5f234468e..f1fc0e2d4 100755 --- a/scripts/get-winogrande.sh +++ b/scripts/get-winogrande.sh @@ -4,7 +4,7 @@ wget https://huggingface.co/datasets/ikawrakow/winogrande-eval-for-llama.cpp/raw echo "Usage:" echo "" -echo " ./perplexity -m model.gguf -f winogrande-debiased-eval.csv --winogrande [--winogrande-tasks N] [other params]" +echo " ./llama-perplexity -m model.gguf -f winogrande-debiased-eval.csv --winogrande [--winogrande-tasks N] [other params]" echo "" exit 0 diff --git a/scripts/hf.sh b/scripts/hf.sh index 58f83d6fe..85c2c4d9a 100755 --- a/scripts/hf.sh +++ b/scripts/hf.sh @@ -3,9 +3,9 @@ # Shortcut for downloading HF models # # Usage: -# ./main -m $(./scripts/hf.sh https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/resolve/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) -# ./main -m $(./scripts/hf.sh --url https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/blob/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) -# ./main -m $(./scripts/hf.sh --repo TheBloke/Mixtral-8x7B-v0.1-GGUF --file mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./llama-cli -m $(./scripts/hf.sh https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/resolve/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./llama-cli -m $(./scripts/hf.sh --url https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/blob/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./llama-cli -m $(./scripts/hf.sh --repo TheBloke/Mixtral-8x7B-v0.1-GGUF --file mixtral-8x7b-v0.1.Q4_K_M.gguf) # # all logs go to stderr diff --git a/scripts/pod-llama.sh 
b/scripts/pod-llama.sh index 5dabbf60e..6ba499a2a 100644 --- a/scripts/pod-llama.sh +++ b/scripts/pod-llama.sh @@ -77,9 +77,9 @@ if [ "$1" -eq "1" ]; then python3 examples/convert-legacy-llama.py ./models/tinyllama-1b --outfile ./models/tinyllama-1b/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_k.gguf q4_k - ./quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "2" ]; then @@ -92,9 +92,9 @@ if [ "$1" -eq "2" ]; then python3 examples/convert-legacy-llama.py ./models/codellama-7b --outfile ./models/codellama-7b/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_k.gguf q4_k - ./quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "3" ]; then @@ -107,9 +107,9 @@ if [ "$1" -eq "3" ]; then python3 examples/convert-legacy-llama.py ./models/codellama-13b --outfile ./models/codellama-13b/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_k.gguf q4_k - ./quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "4" ]; then @@ -122,9 +122,9 @@ if [ "$1" -eq "4" ]; then python3 examples/convert-legacy-llama.py ./models/codellama-34b --outfile ./models/codellama-34b/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_k.gguf q4_k - ./quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "5" ]; then @@ -137,9 +137,9 @@ if [ "$1" -eq "5" ]; then python3 
examples/convert-legacy-llama.py ./models/codellama-7b-instruct --outfile ./models/codellama-7b-instruct/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_k.gguf q4_k - ./quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "6" ]; then @@ -152,9 +152,9 @@ if [ "$1" -eq "6" ]; then python3 examples/convert-legacy-llama.py ./models/codellama-13b-instruct --outfile ./models/codellama-13b-instruct/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_k.gguf q4_k - ./quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "7" ]; then @@ -167,9 +167,9 @@ if [ "$1" -eq "7" ]; then python3 examples/convert-legacy-llama.py ./models/codellama-34b-instruct --outfile ./models/codellama-34b-instruct/ggml-model-f16.gguf --outtype f16 - ./quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_0.gguf q4_0 - ./quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_k.gguf q4_k - ./quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q8_0.gguf q8_0 + ./llama-quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_0.gguf q4_0 + ./llama-quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_k.gguf q4_k + ./llama-quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q8_0.gguf q8_0 fi if [ "$1" -eq "1" ]; then @@ -181,22 +181,22 @@ if [ "$1" -eq "1" ]; then ../scripts/get-wikitext-2.sh unzip wikitext-2-raw-v1.zip - make -j && ./bin/perplexity -m ../models/tinyllama-1b/ggml-model-f16.gguf -f ./wikitext-2-raw/wiki.test.raw -ngl 100 --chunks 32 + make -j && ./bin/llama-perplexity -m ../models/tinyllama-1b/ggml-model-f16.gguf -f ./wikitext-2-raw/wiki.test.raw -ngl 100 --chunks 32 # batched cd /workspace/llama.cpp - LLAMA_CUDA=1 make -j && ./batched ./models/tinyllama-1b/ggml-model-f16.gguf "Hello, my name is" 8 128 999 + LLAMA_CUDA=1 make -j && ./llama-batched ./models/tinyllama-1b/ggml-model-f16.gguf "Hello, my name is" 8 128 999 # batched-bench cd /workspace/llama.cpp - 
LLAMA_CUDA=1 make -j && ./batched-bench ./models/tinyllama-1b/ggml-model-f16.gguf 4608 1 99 0 512 128 1,2,3,4,5,6,7,8,16,32 + LLAMA_CUDA=1 make -j && ./llama-batched-bench ./models/tinyllama-1b/ggml-model-f16.gguf 4608 1 99 0 512 128 1,2,3,4,5,6,7,8,16,32 # parallel cd /workspace/llama.cpp - LLAMA_CUDA=1 make -j && ./parallel -m ./models/tinyllama-1b/ggml-model-f16.gguf -t 1 -ngl 100 -c 4096 -b 512 -s 1 -np 8 -ns 128 -n 100 -cb + LLAMA_CUDA=1 make -j && ./llama-parallel -m ./models/tinyllama-1b/ggml-model-f16.gguf -t 1 -ngl 100 -c 4096 -b 512 -s 1 -np 8 -ns 128 -n 100 -cb fi @@ -204,10 +204,10 @@ fi #if [ "$1" -eq "7" ]; then # cd /workspace/llama.cpp # -# LLAMA_CUDA=1 make -j && ./speculative -m ./models/codellama-34b-instruct/ggml-model-f16.gguf -md ./models/codellama-7b-instruct/ggml-model-q4_0.gguf -p "# Dijkstra's shortest path algorithm in Python (4 spaces indentation) + complexity analysis:\n\n" -e -ngl 999 -ngld 999 -t 4 -n 512 -c 4096 -s 21 --draft 16 -np 1 --temp 0.0 +# LLAMA_CUDA=1 make -j && ./llama-speculative -m ./models/codellama-34b-instruct/ggml-model-f16.gguf -md ./models/codellama-7b-instruct/ggml-model-q4_0.gguf -p "# Dijkstra's shortest path algorithm in Python (4 spaces indentation) + complexity analysis:\n\n" -e -ngl 999 -ngld 999 -t 4 -n 512 -c 4096 -s 21 --draft 16 -np 1 --temp 0.0 #fi # more benches -#LLAMA_CUDA=1 make -j && ./batched-bench ./models/codellama-7b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 -#LLAMA_CUDA=1 make -j && ./batched-bench ./models/codellama-13b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 +#LLAMA_CUDA=1 make -j && ./llama-batched-bench ./models/codellama-7b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 +#LLAMA_CUDA=1 make -j && ./llama-batched-bench ./models/codellama-13b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 diff --git a/scripts/qnt-all.sh b/scripts/qnt-all.sh index b4c2a159e..bc43738a2 100755 --- a/scripts/qnt-all.sh +++ b/scripts/qnt-all.sh @@ -26,5 +26,5 @@ set -e mkdir -p ${out} for q in ${qnt[@]}; do - time ./bin/quantize ../models/${model}/ggml-model-f16.gguf ../models/${model}/ggml-model-${q}.gguf ${q} 2>&1 ${args} | tee ${out}/qnt-${q}.txt + time ./bin/llama-quantize ../models/${model}/ggml-model-f16.gguf ../models/${model}/ggml-model-${q}.gguf ${q} 2>&1 ${args} | tee ${out}/qnt-${q}.txt done diff --git a/scripts/run-all-ppl.sh b/scripts/run-all-ppl.sh index e04d61d7f..e15f74f1b 100755 --- a/scripts/run-all-ppl.sh +++ b/scripts/run-all-ppl.sh @@ -26,5 +26,5 @@ out="../tmp/results-${model}" mkdir -p ${out} for q in ${qnt[@]}; do - time ./bin/perplexity -m ../models/${model}/ggml-model-f16.gguf -f ./wiki.test.raw ${args} 2>&1 | tee ${out}/ppl-${q}.txt + time ./bin/llama-perplexity -m ../models/${model}/ggml-model-f16.gguf -f ./wiki.test.raw ${args} 2>&1 | tee ${out}/ppl-${q}.txt done diff --git a/scripts/run-with-preset.py b/scripts/run-with-preset.py index 0d7219113..ee21eab37 100755 --- a/scripts/run-with-preset.py +++ b/scripts/run-with-preset.py @@ -10,7 +10,7 @@ import yaml logger = logging.getLogger("run-with-preset") -CLI_ARGS_MAIN_PERPLEXITY = [ +CLI_ARGS_LLAMA_CLI_PERPLEXITY = [ "batch-size", "cfg-negative-prompt", "cfg-scale", "chunks", "color", "ctx-size", "escape", "export", "file", "frequency-penalty", "grammar", "grammar-file", "hellaswag", "hellaswag-tasks", "ignore-eos", "in-prefix", "in-prefix-bos", "in-suffix", @@ -29,7 +29,7 @@ CLI_ARGS_LLAMA_BENCH = [ "n-prompt", "output", "repetitions", "tensor-split", "threads", "verbose" ] -CLI_ARGS_SERVER = [ 
+CLI_ARGS_LLAMA_SERVER = [ "alias", "batch-size", "ctx-size", "embedding", "host", "memory-f32", "lora", "lora-base", "low-vram", "main-gpu", "mlock", "model", "n-gpu-layers", "n-probs", "no-mmap", "no-mul-mat-q", "numa", "path", "port", "rope-freq-base", "timeout", "rope-freq-scale", "tensor-split", @@ -37,7 +37,7 @@ CLI_ARGS_SERVER = [ ] description = """Run llama.cpp binaries with presets from YAML file(s). -To specify which binary should be run, specify the "binary" property (main, perplexity, llama-bench, and server are supported). +To specify which binary should be run, specify the "binary" property (llama-cli, llama-perplexity, llama-bench, and llama-server are supported). To get a preset file template, run a llama.cpp binary with the "--logdir" CLI argument. Formatting considerations: @@ -77,19 +77,19 @@ for yaml_file in known_args.yaml_files: props = {prop.replace("_", "-"): val for prop, val in props.items()} -binary = props.pop("binary", "main") +binary = props.pop("binary", "llama-cli") if known_args.binary: binary = known_args.binary if os.path.exists(f"./{binary}"): binary = f"./{binary}" -if binary.lower().endswith("main") or binary.lower().endswith("perplexity"): - cli_args = CLI_ARGS_MAIN_PERPLEXITY +if binary.lower().endswith("llama-cli") or binary.lower().endswith("llama-perplexity"): + cli_args = CLI_ARGS_LLAMA_CLI_PERPLEXITY elif binary.lower().endswith("llama-bench"): cli_args = CLI_ARGS_LLAMA_BENCH -elif binary.lower().endswith("server"): - cli_args = CLI_ARGS_SERVER +elif binary.lower().endswith("llama-server"): + cli_args = CLI_ARGS_LLAMA_SERVER else: logger.error(f"Unknown binary: {binary}") sys.exit(1) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index b3715e204..199232440 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -380,13 +380,13 @@ fi if [[ "$backend" == "cuda" ]]; then printf "[+] Building with CUDA backend\n" - LLAMA_CUDA=1 make -j server $log + LLAMA_CUDA=1 make -j llama-server $log elif [[ "$backend" == "cpu" ]]; then printf "[+] Building with CPU backend\n" - make -j server $log + make -j llama-server $log elif [[ "$backend" == "metal" ]]; then printf "[+] Building with Metal backend\n" - make -j server $log + make -j llama-server $log else printf "[-] Unknown backend: %s\n" "$backend" exit 1 @@ -413,6 +413,6 @@ if [[ $verbose -eq 1 ]]; then args="$args --verbose" fi -./server -m "../$wfile" --host 0.0.0.0 --port "$port" -c $n_kv -np "$n_parallel" $args +./llama-server -m "../$wfile" --host 0.0.0.0 --port "$port" -c $n_kv -np "$n_parallel" $args exit 0
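Once `llama-server` is listening, the deployment can be verified with a single completion request; a minimal sketch, assuming the server is reachable on the default `127.0.0.1:8080` (adjust to whatever `--host`/`--port` the script above was given):

```bash
# Sketch: POST a completion request to a running llama-server instance.
curl --request POST --url http://127.0.0.1:8080/completion \
    --header "Content-Type: application/json" \
    --data '{"prompt": "Building a website can be done in 10 simple steps:", "n_predict": 64}'
```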